var/home/core/zuul-output/logs/kubelet.log
Dec 13 03:11:46 crc systemd[1]: Starting Kubernetes Kubelet... Dec 13 03:11:46 crc restorecon[4763]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized 
by admin to system_u:object_r:container_file_t:s0:c225,c458 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c4,c24 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c574,c582 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c138,c778 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 13 03:11:46 crc restorecon[4763]: 
/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 
Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 13 03:11:46 crc restorecon[4763]: 
/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c84,c419 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 13 03:11:46 crc restorecon[4763]: 
/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c108,c511 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c442,c857 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:46 crc restorecon[4763]: 
/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c12,c18 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 13 03:11:46 crc restorecon[4763]: 
/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 13 03:11:46 crc restorecon[4763]: 
/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 13 03:11:46 crc restorecon[4763]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c0,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 13 03:11:47 crc restorecon[4763]: 
/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 13 03:11:47 crc restorecon[4763]: 
/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: 
/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 13 03:11:47 crc 
restorecon[4763]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 13 03:11:47 crc restorecon[4763]: 
/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 13 03:11:47 crc restorecon[4763]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 13 03:11:47 crc 
restorecon[4763]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c129,c158 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c97,c980 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc 
restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc 
restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 13 03:11:47 
crc restorecon[4763]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to system_u:object_r:container_file_t:s0:c377,c642 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 13 
03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 13 03:11:47 crc restorecon[4763]: 
/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 13 03:11:47 crc restorecon[4763]: 
/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 13 03:11:47 crc restorecon[4763]: 
/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 13 03:11:47 crc restorecon[4763]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 13 03:11:47 crc restorecon[4763]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c5,c6 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 
03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 03:11:47 crc 
restorecon[4763]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 13 03:11:47 crc restorecon[4763]: 
/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917 Dec 13 03:11:47 crc restorecon[4763]: 
/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 13 03:11:47 crc restorecon[4763]: 
/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c37,c572 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 13 03:11:47 crc restorecon[4763]: 
/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 
03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 
03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc 
restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c133,c223 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 13 03:11:47 crc restorecon[4763]: 
/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c682,c947 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 13 03:11:47 crc restorecon[4763]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 13 03:11:47 crc restorecon[4763]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0 Dec 13 03:11:47 crc kubenswrapper[5070]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Dec 13 03:11:47 crc kubenswrapper[5070]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version. Dec 13 03:11:47 crc kubenswrapper[5070]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Dec 13 03:11:47 crc kubenswrapper[5070]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. 
Dec 13 03:11:47 crc kubenswrapper[5070]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI. Dec 13 03:11:47 crc kubenswrapper[5070]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.970588 5070 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime" Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.975325 5070 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.975361 5070 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.975369 5070 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.975374 5070 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.975379 5070 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.975384 5070 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.975390 5070 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.975395 5070 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.975399 5070 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.975404 5070 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.975410 5070 feature_gate.go:330] unrecognized feature gate: Example Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.975416 5070 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.975422 5070 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.975434 5070 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.975454 5070 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.975460 5070 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.975464 5070 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.975474 5070 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.975480 5070 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.975487 5070 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.975755 5070 feature_gate.go:330] unrecognized feature gate: 
ClusterAPIInstallIBMCloud Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.975764 5070 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.975770 5070 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.975775 5070 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.975781 5070 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.975794 5070 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.975799 5070 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.975804 5070 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.975810 5070 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.975817 5070 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.975824 5070 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.975829 5070 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.975835 5070 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.975840 5070 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.975847 5070 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.975852 5070 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.975857 5070 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.975869 5070 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.975875 5070 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.975880 5070 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.975885 5070 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.975890 5070 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.975894 5070 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.975899 5070 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.975907 5070 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.976047 5070 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.976058 5070 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.976152 5070 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.976158 5070 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.976163 5070 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.976167 5070 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.976171 5070 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.976176 5070 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.976180 5070 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.976184 5070 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.976188 5070 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.976191 5070 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.976197 5070 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.976201 5070 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.976205 5070 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.976208 5070 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.976212 5070 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.976215 5070 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.976219 5070 
feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.976230 5070 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.976236 5070 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.976241 5070 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.976245 5070 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.976249 5070 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.976257 5070 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.976261 5070 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.976796 5070 flags.go:64] FLAG: --address="0.0.0.0" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.976812 5070 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.976820 5070 flags.go:64] FLAG: --anonymous-auth="true" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.976827 5070 flags.go:64] FLAG: --application-metrics-count-limit="100" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.976833 5070 flags.go:64] FLAG: --authentication-token-webhook="false" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.976837 5070 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.976844 5070 flags.go:64] FLAG: --authorization-mode="AlwaysAllow" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.976850 5070 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.976855 5070 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.976859 5070 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.976864 5070 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.976868 5070 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.976875 5070 flags.go:64] FLAG: --cgroup-driver="cgroupfs" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.976880 5070 flags.go:64] FLAG: --cgroup-root="" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.976885 5070 flags.go:64] FLAG: --cgroups-per-qos="true" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.976889 5070 flags.go:64] FLAG: --client-ca-file="" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.976894 5070 flags.go:64] FLAG: --cloud-config="" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.976898 5070 flags.go:64] FLAG: --cloud-provider="" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.976902 5070 flags.go:64] FLAG: --cluster-dns="[]" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.976909 5070 flags.go:64] FLAG: --cluster-domain="" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.976914 5070 flags.go:64] FLAG: 
--config="/etc/kubernetes/kubelet.conf" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.976918 5070 flags.go:64] FLAG: --config-dir="" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.976923 5070 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.976927 5070 flags.go:64] FLAG: --container-log-max-files="5" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.976933 5070 flags.go:64] FLAG: --container-log-max-size="10Mi" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.976938 5070 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.976942 5070 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.976946 5070 flags.go:64] FLAG: --containerd-namespace="k8s.io" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.976951 5070 flags.go:64] FLAG: --contention-profiling="false" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.976955 5070 flags.go:64] FLAG: --cpu-cfs-quota="true" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.976959 5070 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.976964 5070 flags.go:64] FLAG: --cpu-manager-policy="none" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.976968 5070 flags.go:64] FLAG: --cpu-manager-policy-options="" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.976974 5070 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.976978 5070 flags.go:64] FLAG: --enable-controller-attach-detach="true" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.976983 5070 flags.go:64] FLAG: --enable-debugging-handlers="true" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.976987 5070 flags.go:64] FLAG: --enable-load-reader="false" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.976991 5070 flags.go:64] FLAG: --enable-server="true" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.976995 5070 flags.go:64] FLAG: --enforce-node-allocatable="[pods]" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.977000 5070 flags.go:64] FLAG: --event-burst="100" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.977005 5070 flags.go:64] FLAG: --event-qps="50" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.977010 5070 flags.go:64] FLAG: --event-storage-age-limit="default=0" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.977015 5070 flags.go:64] FLAG: --event-storage-event-limit="default=0" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.977019 5070 flags.go:64] FLAG: --eviction-hard="" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.977026 5070 flags.go:64] FLAG: --eviction-max-pod-grace-period="0" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.977030 5070 flags.go:64] FLAG: --eviction-minimum-reclaim="" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.977035 5070 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.977039 5070 flags.go:64] FLAG: --eviction-soft="" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.977044 5070 flags.go:64] FLAG: --eviction-soft-grace-period="" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.977048 5070 flags.go:64] FLAG: --exit-on-lock-contention="false" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 
03:11:47.977052 5070 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.977056 5070 flags.go:64] FLAG: --experimental-mounter-path="" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.977061 5070 flags.go:64] FLAG: --fail-cgroupv1="false" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.977065 5070 flags.go:64] FLAG: --fail-swap-on="true" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.977069 5070 flags.go:64] FLAG: --feature-gates="" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.977087 5070 flags.go:64] FLAG: --file-check-frequency="20s" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.977092 5070 flags.go:64] FLAG: --global-housekeeping-interval="1m0s" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.977096 5070 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.977100 5070 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.977105 5070 flags.go:64] FLAG: --healthz-port="10248" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.977109 5070 flags.go:64] FLAG: --help="false" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.977113 5070 flags.go:64] FLAG: --hostname-override="" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.977118 5070 flags.go:64] FLAG: --housekeeping-interval="10s" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.977122 5070 flags.go:64] FLAG: --http-check-frequency="20s" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.977127 5070 flags.go:64] FLAG: --image-credential-provider-bin-dir="" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.977130 5070 flags.go:64] FLAG: --image-credential-provider-config="" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.977135 5070 flags.go:64] FLAG: --image-gc-high-threshold="85" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.977139 5070 flags.go:64] FLAG: --image-gc-low-threshold="80" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.977143 5070 flags.go:64] FLAG: --image-service-endpoint="" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.977147 5070 flags.go:64] FLAG: --kernel-memcg-notification="false" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.977152 5070 flags.go:64] FLAG: --kube-api-burst="100" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.977156 5070 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.977160 5070 flags.go:64] FLAG: --kube-api-qps="50" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.977165 5070 flags.go:64] FLAG: --kube-reserved="" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.977169 5070 flags.go:64] FLAG: --kube-reserved-cgroup="" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.977172 5070 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.977178 5070 flags.go:64] FLAG: --kubelet-cgroups="" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.977182 5070 flags.go:64] FLAG: --local-storage-capacity-isolation="true" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.977186 5070 flags.go:64] FLAG: --lock-file="" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.977190 5070 flags.go:64] FLAG: --log-cadvisor-usage="false" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.977194 5070 flags.go:64] 
FLAG: --log-flush-frequency="5s" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.977198 5070 flags.go:64] FLAG: --log-json-info-buffer-size="0" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.977205 5070 flags.go:64] FLAG: --log-json-split-stream="false" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.977211 5070 flags.go:64] FLAG: --log-text-info-buffer-size="0" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.977215 5070 flags.go:64] FLAG: --log-text-split-stream="false" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.977220 5070 flags.go:64] FLAG: --logging-format="text" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.977224 5070 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.977229 5070 flags.go:64] FLAG: --make-iptables-util-chains="true" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.977233 5070 flags.go:64] FLAG: --manifest-url="" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.977237 5070 flags.go:64] FLAG: --manifest-url-header="" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.977243 5070 flags.go:64] FLAG: --max-housekeeping-interval="15s" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.977247 5070 flags.go:64] FLAG: --max-open-files="1000000" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.977253 5070 flags.go:64] FLAG: --max-pods="110" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.977257 5070 flags.go:64] FLAG: --maximum-dead-containers="-1" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.977262 5070 flags.go:64] FLAG: --maximum-dead-containers-per-container="1" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.977266 5070 flags.go:64] FLAG: --memory-manager-policy="None" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.977271 5070 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.977275 5070 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.977280 5070 flags.go:64] FLAG: --node-ip="192.168.126.11" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.977284 5070 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.977297 5070 flags.go:64] FLAG: --node-status-max-images="50" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.977302 5070 flags.go:64] FLAG: --node-status-update-frequency="10s" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.977306 5070 flags.go:64] FLAG: --oom-score-adj="-999" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.977311 5070 flags.go:64] FLAG: --pod-cidr="" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.977315 5070 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.977321 5070 flags.go:64] FLAG: --pod-manifest-path="" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.977326 5070 flags.go:64] FLAG: --pod-max-pids="-1" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.977331 5070 flags.go:64] FLAG: --pods-per-core="0" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.977336 5070 flags.go:64] FLAG: --port="10250" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 
03:11:47.977341 5070 flags.go:64] FLAG: --protect-kernel-defaults="false" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.977346 5070 flags.go:64] FLAG: --provider-id="" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.977350 5070 flags.go:64] FLAG: --qos-reserved="" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.977356 5070 flags.go:64] FLAG: --read-only-port="10255" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.977360 5070 flags.go:64] FLAG: --register-node="true" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.977364 5070 flags.go:64] FLAG: --register-schedulable="true" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.977368 5070 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.977376 5070 flags.go:64] FLAG: --registry-burst="10" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.977380 5070 flags.go:64] FLAG: --registry-qps="5" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.977385 5070 flags.go:64] FLAG: --reserved-cpus="" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.977390 5070 flags.go:64] FLAG: --reserved-memory="" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.977396 5070 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.977401 5070 flags.go:64] FLAG: --root-dir="/var/lib/kubelet" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.977406 5070 flags.go:64] FLAG: --rotate-certificates="false" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.977410 5070 flags.go:64] FLAG: --rotate-server-certificates="false" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.977415 5070 flags.go:64] FLAG: --runonce="false" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.977419 5070 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.977424 5070 flags.go:64] FLAG: --runtime-request-timeout="2m0s" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.977429 5070 flags.go:64] FLAG: --seccomp-default="false" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.977434 5070 flags.go:64] FLAG: --serialize-image-pulls="true" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.977454 5070 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.977459 5070 flags.go:64] FLAG: --storage-driver-db="cadvisor" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.977464 5070 flags.go:64] FLAG: --storage-driver-host="localhost:8086" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.977469 5070 flags.go:64] FLAG: --storage-driver-password="root" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.977473 5070 flags.go:64] FLAG: --storage-driver-secure="false" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.977478 5070 flags.go:64] FLAG: --storage-driver-table="stats" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.977482 5070 flags.go:64] FLAG: --storage-driver-user="root" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.977488 5070 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.977492 5070 flags.go:64] FLAG: --sync-frequency="1m0s" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.977497 5070 flags.go:64] FLAG: --system-cgroups="" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.977501 5070 flags.go:64] FLAG: 
--system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.977509 5070 flags.go:64] FLAG: --system-reserved-cgroup="" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.977513 5070 flags.go:64] FLAG: --tls-cert-file="" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.977517 5070 flags.go:64] FLAG: --tls-cipher-suites="[]" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.977654 5070 flags.go:64] FLAG: --tls-min-version="" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.977659 5070 flags.go:64] FLAG: --tls-private-key-file="" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.977663 5070 flags.go:64] FLAG: --topology-manager-policy="none" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.977668 5070 flags.go:64] FLAG: --topology-manager-policy-options="" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.977673 5070 flags.go:64] FLAG: --topology-manager-scope="container" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.977677 5070 flags.go:64] FLAG: --v="2" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.977683 5070 flags.go:64] FLAG: --version="false" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.977690 5070 flags.go:64] FLAG: --vmodule="" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.977695 5070 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.977699 5070 flags.go:64] FLAG: --volume-stats-agg-period="1m0s" Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.977820 5070 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.977825 5070 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.977830 5070 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.977834 5070 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.977838 5070 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.977842 5070 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.977851 5070 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.977854 5070 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.977858 5070 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.977861 5070 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.977865 5070 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.977869 5070 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.977872 5070 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.977876 5070 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 13 03:11:47 crc kubenswrapper[5070]: 
W1213 03:11:47.977879 5070 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.977883 5070 feature_gate.go:330] unrecognized feature gate: Example Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.977887 5070 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.977892 5070 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.977896 5070 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.977916 5070 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.977921 5070 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.977926 5070 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.977930 5070 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.977935 5070 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.977939 5070 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.977943 5070 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.977946 5070 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.977950 5070 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.977953 5070 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.977958 5070 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.977962 5070 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.977966 5070 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.977969 5070 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.977973 5070 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.977976 5070 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.977980 5070 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.977985 5070 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.977988 5070 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.977996 5070 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.978000 5070 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.978004 5070 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.978007 5070 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.978011 5070 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.978015 5070 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.978019 5070 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.978023 5070 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.978026 5070 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.978030 5070 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.978033 5070 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.978037 5070 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.978040 5070 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.978044 5070 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.978047 5070 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.978051 5070 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.978054 5070 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.978059 5070 feature_gate.go:353] Setting GA 
feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.978064 5070 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.978068 5070 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.978073 5070 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.978077 5070 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.978081 5070 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.978086 5070 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.978090 5070 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.978093 5070 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.978097 5070 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.978101 5070 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.978105 5070 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.978109 5070 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.978114 5070 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.978119 5070 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.978126 5070 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.978260 5070 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.987198 5070 server.go:491] "Kubelet version" kubeletVersion="v1.31.5" Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.987251 5070 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK="" Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.987421 5070 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.987436 5070 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.987471 5070 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.987482 5070 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.987492 5070 
feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.987500 5070 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.987510 5070 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.987519 5070 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.987528 5070 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.987536 5070 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.987545 5070 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.987553 5070 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.987561 5070 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.987570 5070 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.987579 5070 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.987587 5070 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.987596 5070 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.987604 5070 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.987612 5070 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.987621 5070 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.987629 5070 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.987638 5070 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.987646 5070 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.987684 5070 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.987697 5070 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.987710 5070 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.987719 5070 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.987727 5070 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.987736 5070 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.987744 5070 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.987754 5070 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.987764 5070 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.987773 5070 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.987781 5070 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.987793 5070 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.987802 5070 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.987810 5070 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.987818 5070 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.987827 5070 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.987836 5070 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.987848 5070 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.987859 5070 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.987869 5070 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.987879 5070 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.987888 5070 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.987898 5070 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.987907 5070 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.987915 5070 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.987924 5070 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.987932 5070 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.987941 5070 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.987949 5070 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.987958 5070 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.987969 5070 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.987979 5070 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.987989 5070 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.987999 5070 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.988008 5070 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.988018 5070 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.988027 5070 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.988037 5070 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.988047 5070 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.988060 5070 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.988071 5070 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.988082 5070 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.988091 5070 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.988100 5070 feature_gate.go:330] unrecognized feature gate: Example Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.988108 5070 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.988118 5070 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.988127 5070 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.988139 5070 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 13 03:11:47 crc kubenswrapper[5070]: I1213 03:11:47.988154 5070 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.988411 5070 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.988424 5070 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.988434 5070 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.988468 5070 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.988478 5070 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.988487 5070 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.988496 5070 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.988505 5070 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.988515 5070 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.988525 5070 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.988534 5070 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.988543 5070 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.988552 5070 feature_gate.go:330] unrecognized feature 
gate: AlibabaPlatform Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.988561 5070 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.988571 5070 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.988579 5070 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.988588 5070 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.988597 5070 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.988606 5070 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.988616 5070 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.988645 5070 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.988657 5070 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.988668 5070 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.988677 5070 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.988689 5070 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.988699 5070 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 13 03:11:47 crc kubenswrapper[5070]: W1213 03:11:47.988710 5070 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 13 03:11:48 crc kubenswrapper[5070]: W1213 03:11:47.988721 5070 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 13 03:11:48 crc kubenswrapper[5070]: W1213 03:11:47.988730 5070 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 13 03:11:48 crc kubenswrapper[5070]: W1213 03:11:47.988740 5070 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 13 03:11:48 crc kubenswrapper[5070]: W1213 03:11:47.988750 5070 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 13 03:11:48 crc kubenswrapper[5070]: W1213 03:11:47.988759 5070 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 13 03:11:48 crc kubenswrapper[5070]: W1213 03:11:47.988767 5070 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 13 03:11:48 crc kubenswrapper[5070]: W1213 03:11:47.988777 5070 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 13 03:11:48 crc kubenswrapper[5070]: W1213 03:11:47.988788 5070 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 13 03:11:48 crc kubenswrapper[5070]: W1213 03:11:47.988797 5070 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 13 03:11:48 crc kubenswrapper[5070]: W1213 03:11:47.988807 5070 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 13 03:11:48 crc kubenswrapper[5070]: W1213 03:11:47.988815 5070 feature_gate.go:330] unrecognized feature gate: Example Dec 13 03:11:48 crc kubenswrapper[5070]: W1213 03:11:47.988824 5070 
feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 13 03:11:48 crc kubenswrapper[5070]: W1213 03:11:47.988833 5070 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 13 03:11:48 crc kubenswrapper[5070]: W1213 03:11:47.988845 5070 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Dec 13 03:11:48 crc kubenswrapper[5070]: W1213 03:11:47.988857 5070 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 13 03:11:48 crc kubenswrapper[5070]: W1213 03:11:47.988867 5070 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 13 03:11:48 crc kubenswrapper[5070]: W1213 03:11:47.988877 5070 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 13 03:11:48 crc kubenswrapper[5070]: W1213 03:11:47.988887 5070 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 13 03:11:48 crc kubenswrapper[5070]: W1213 03:11:47.988898 5070 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 13 03:11:48 crc kubenswrapper[5070]: W1213 03:11:47.988909 5070 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 13 03:11:48 crc kubenswrapper[5070]: W1213 03:11:47.988920 5070 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 13 03:11:48 crc kubenswrapper[5070]: W1213 03:11:47.988929 5070 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 13 03:11:48 crc kubenswrapper[5070]: W1213 03:11:47.988938 5070 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 13 03:11:48 crc kubenswrapper[5070]: W1213 03:11:47.988949 5070 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 13 03:11:48 crc kubenswrapper[5070]: W1213 03:11:47.988958 5070 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 13 03:11:48 crc kubenswrapper[5070]: W1213 03:11:47.988969 5070 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Dec 13 03:11:48 crc kubenswrapper[5070]: W1213 03:11:47.988981 5070 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
Dec 13 03:11:48 crc kubenswrapper[5070]: W1213 03:11:47.988992 5070 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 13 03:11:48 crc kubenswrapper[5070]: W1213 03:11:47.989004 5070 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 13 03:11:48 crc kubenswrapper[5070]: W1213 03:11:47.989015 5070 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 13 03:11:48 crc kubenswrapper[5070]: W1213 03:11:47.989025 5070 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 13 03:11:48 crc kubenswrapper[5070]: W1213 03:11:47.989034 5070 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 13 03:11:48 crc kubenswrapper[5070]: W1213 03:11:47.989043 5070 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 13 03:11:48 crc kubenswrapper[5070]: W1213 03:11:47.989053 5070 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 13 03:11:48 crc kubenswrapper[5070]: W1213 03:11:47.989062 5070 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 13 03:11:48 crc kubenswrapper[5070]: W1213 03:11:47.989071 5070 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 13 03:11:48 crc kubenswrapper[5070]: W1213 03:11:47.989079 5070 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 13 03:11:48 crc kubenswrapper[5070]: W1213 03:11:47.989088 5070 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 13 03:11:48 crc kubenswrapper[5070]: W1213 03:11:47.989097 5070 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 13 03:11:48 crc kubenswrapper[5070]: W1213 03:11:47.989106 5070 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 13 03:11:48 crc kubenswrapper[5070]: W1213 03:11:47.989115 5070 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 13 03:11:48 crc kubenswrapper[5070]: W1213 03:11:47.989124 5070 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 13 03:11:48 crc kubenswrapper[5070]: W1213 03:11:47.989133 5070 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 13 03:11:48 crc kubenswrapper[5070]: W1213 03:11:47.989144 5070 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:47.989158 5070 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:47.989579 5070 server.go:940] "Client rotation is on, will bootstrap in background" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:47.994818 5070 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:47.995015 5070 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem". 
Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:47.995966 5070 server.go:997] "Starting client certificate rotation" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:47.996012 5070 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:47.996399 5070 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2026-01-05 00:52:30.57777274 +0000 UTC Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:47.996634 5070 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 549h40m42.581147471s for next certificate rotation Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.004469 5070 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.008225 5070 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.022292 5070 log.go:25] "Validated CRI v1 runtime API" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.038776 5070 log.go:25] "Validated CRI v1 image API" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.041161 5070 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.046764 5070 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2025-12-13-03-07-31-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3] Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.046829 5070 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:42 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:43 fsType:tmpfs blockSize:0}] Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.079263 5070 manager.go:217] Machine: {Timestamp:2025-12-13 03:11:48.077116286 +0000 UTC m=+0.312959912 CPUVendorID:AuthenticAMD NumCores:12 NumPhysicalCores:1 NumSockets:12 CpuFrequency:2799998 MemoryCapacity:33654128640 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:e9aee69c-6c9b-424e-9f99-9beaad60efce BootID:a2f84c95-0f97-4e73-a87e-b72c87d9f029 Filesystems:[{Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:16827064320 Type:vfs Inodes:4108170 HasInodes:true} {Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:6730825728 Type:vfs Inodes:819200 HasInodes:true} {Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:16827064320 Type:vfs Inodes:1048576 HasInodes:true} {Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true} {Device:/run/user/1000 DeviceMajor:0 DeviceMinor:42 
Capacity:3365412864 Type:vfs Inodes:821634 HasInodes:true} {Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:43 Capacity:1073741824 Type:vfs Inodes:4108170 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:214748364800 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:46:ae:c9 Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:ens3 MacAddress:fa:16:3e:46:ae:c9 Speed:-1 Mtu:1500} {Name:ens7 MacAddress:fa:16:3e:70:ca:18 Speed:-1 Mtu:1500} {Name:ens7.20 MacAddress:52:54:00:da:be:9a Speed:-1 Mtu:1496} {Name:ens7.21 MacAddress:52:54:00:b0:7e:56 Speed:-1 Mtu:1496} {Name:ens7.22 MacAddress:52:54:00:5e:cd:86 Speed:-1 Mtu:1496} {Name:ens7.23 MacAddress:52:54:00:63:02:bf Speed:-1 Mtu:1496} {Name:eth10 MacAddress:d6:aa:0a:26:e9:42 Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:4a:aa:ec:fa:ef:39 Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:33654128640 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:32768 Type:Data Level:1} {Id:0 Size:32768 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:32768 Type:Data Level:1} {Id:1 Size:32768 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[10] Caches:[{Id:10 Size:32768 Type:Data Level:1} {Id:10 Size:32768 Type:Instruction Level:1} {Id:10 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:10 Size:16777216 Type:Unified Level:3}] SocketID:10 BookID: DrawerID:} {Id:0 Threads:[11] Caches:[{Id:11 Size:32768 Type:Data Level:1} {Id:11 Size:32768 Type:Instruction Level:1} {Id:11 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:11 Size:16777216 Type:Unified Level:3}] SocketID:11 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:32768 Type:Data Level:1} {Id:2 Size:32768 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:32768 Type:Data Level:1} {Id:3 Size:32768 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:32768 Type:Data Level:1} {Id:4 Size:32768 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:32768 Type:Data Level:1} {Id:5 Size:32768 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:32768 Type:Data Level:1} {Id:6 Size:32768 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 Size:32768 Type:Data Level:1} {Id:7 Size:32768 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:} {Id:0 Threads:[8] Caches:[{Id:8 Size:32768 Type:Data Level:1} {Id:8 Size:32768 Type:Instruction Level:1} {Id:8 Size:524288 Type:Unified Level:2}] 
UncoreCaches:[{Id:8 Size:16777216 Type:Unified Level:3}] SocketID:8 BookID: DrawerID:} {Id:0 Threads:[9] Caches:[{Id:9 Size:32768 Type:Data Level:1} {Id:9 Size:32768 Type:Instruction Level:1} {Id:9 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:9 Size:16777216 Type:Unified Level:3}] SocketID:9 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None} Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.079728 5070 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available. Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.079963 5070 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:} Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.081033 5070 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.081561 5070 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[] Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.081647 5070 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2} Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.082018 5070 topology_manager.go:138] "Creating topology manager with none policy" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.082038 5070 container_manager_linux.go:303] "Creating device plugin manager" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.082376 5070 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.082475 5070 server.go:66] "Creating device plugin registration server" 
version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.082950 5070 state_mem.go:36] "Initialized new in-memory state store" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.083180 5070 server.go:1245] "Using root directory" path="/var/lib/kubelet" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.084328 5070 kubelet.go:418] "Attempting to sync node with API server" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.084381 5070 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.084505 5070 file.go:69] "Watching path" path="/etc/kubernetes/manifests" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.084538 5070 kubelet.go:324] "Adding apiserver pod source" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.084567 5070 apiserver.go:42] "Waiting for node sync before watching apiserver pods" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.087566 5070 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.088326 5070 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem". Dec 13 03:11:48 crc kubenswrapper[5070]: W1213 03:11:48.088378 5070 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.227:6443: connect: connection refused Dec 13 03:11:48 crc kubenswrapper[5070]: W1213 03:11:48.088495 5070 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.227:6443: connect: connection refused Dec 13 03:11:48 crc kubenswrapper[5070]: E1213 03:11:48.088618 5070 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.227:6443: connect: connection refused" logger="UnhandledError" Dec 13 03:11:48 crc kubenswrapper[5070]: E1213 03:11:48.088619 5070 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.227:6443: connect: connection refused" logger="UnhandledError" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.089599 5070 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.090385 5070 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.090431 5070 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.090477 5070 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.090494 5070 plugins.go:603] "Loaded volume plugin" 
pluginName="kubernetes.io/host-path" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.090520 5070 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.090535 5070 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.090551 5070 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.090577 5070 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.090596 5070 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.090615 5070 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.090635 5070 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.090649 5070 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.090928 5070 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.091583 5070 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.227:6443: connect: connection refused Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.092395 5070 server.go:1280] "Started kubelet" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.092994 5070 server.go:163] "Starting to listen" address="0.0.0.0" port=10250 Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.092993 5070 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10 Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.093792 5070 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock" Dec 13 03:11:48 crc systemd[1]: Started Kubernetes Kubelet. 
Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.095703 5070 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.095726 5070 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.096171 5070 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-12 10:22:16.168108785 +0000 UTC Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.096729 5070 volume_manager.go:287] "The desired_state_of_world populator starts" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.096745 5070 volume_manager.go:289] "Starting Kubelet Volume Manager" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.096789 5070 server.go:460] "Adding debug handlers to kubelet server" Dec 13 03:11:48 crc kubenswrapper[5070]: E1213 03:11:48.097379 5070 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.227:6443: connect: connection refused" interval="200ms" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.098055 5070 desired_state_of_world_populator.go:146] "Desired state populator starts to run" Dec 13 03:11:48 crc kubenswrapper[5070]: E1213 03:11:48.102243 5070 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 13 03:11:48 crc kubenswrapper[5070]: E1213 03:11:48.097265 5070 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.227:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.1880a7c294d5b3f5 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-13 03:11:48.091761653 +0000 UTC m=+0.327605239,LastTimestamp:2025-12-13 03:11:48.091761653 +0000 UTC m=+0.327605239,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 13 03:11:48 crc kubenswrapper[5070]: W1213 03:11:48.102689 5070 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.227:6443: connect: connection refused Dec 13 03:11:48 crc kubenswrapper[5070]: E1213 03:11:48.102756 5070 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.227:6443: connect: connection refused" logger="UnhandledError" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.104720 5070 factory.go:153] Registering CRI-O factory Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.104750 5070 factory.go:221] Registration of the crio container factory successfully Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.104820 5070 factory.go:219] Registration of the containerd container factory failed: unable to 
create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.104833 5070 factory.go:55] Registering systemd factory Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.104842 5070 factory.go:221] Registration of the systemd container factory successfully Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.104871 5070 factory.go:103] Registering Raw factory Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.104894 5070 manager.go:1196] Started watching for new ooms in manager Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.112378 5070 manager.go:319] Starting recovery of all containers Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.119796 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.120142 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.120277 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.120434 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.120674 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.120801 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.120923 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.121045 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.121189 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" 
volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.121366 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.121528 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.121659 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.121784 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.121909 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.122049 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.122183 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.122304 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.122480 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.122625 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.122773 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" 
volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.122911 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.123047 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.123178 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.123311 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.123469 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.123651 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.123791 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.123945 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.124089 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.124289 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.124648 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" 
volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.124797 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.124974 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.125135 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.125405 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.125640 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.125777 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.125908 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.126037 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.126167 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.126282 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.126373 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" 
volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.126477 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.126571 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.126655 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.126736 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.126825 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.126906 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.126986 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.127065 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.127182 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.127277 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.127373 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" 
volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.127475 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.127564 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.127645 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.127740 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.127854 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.127942 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.128031 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.128120 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.128201 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.128287 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.128379 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" 
volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.128483 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.128567 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.128647 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.128744 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.128827 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.128909 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.128990 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.129072 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.129180 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.129274 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.129387 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" 
volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.129567 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.129650 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.129727 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.129817 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.129905 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.129986 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.130068 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.130148 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.130242 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.130327 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.130405 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.130505 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.130591 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.130681 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.131933 5070 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.132071 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.132161 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.132269 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.132366 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.132477 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.132580 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.132660 5070 
reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.132758 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.132848 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.132939 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.133024 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.133103 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.133192 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.133284 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.133406 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.133668 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.133789 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.133875 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual 
state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.133960 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.134094 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.134183 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.134335 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.134468 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.134574 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.134727 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.135057 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.135161 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.135267 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.135435 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.135580 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.135715 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.135817 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.135935 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.136024 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.136111 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.136324 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.136411 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.136534 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.136615 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.136701 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.136827 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.136929 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.137019 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.137121 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.137205 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.137323 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.137417 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.137634 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.137745 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.137838 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.137950 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.138064 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.138172 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.138569 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.138659 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.138740 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.138881 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.139197 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.139347 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.139461 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.139551 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.139631 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" 
volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.139761 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.139859 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.139968 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.140078 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.140175 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.140268 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.140379 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.140731 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.140829 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.140958 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.141199 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" 
volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.141403 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.141576 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.141686 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.141802 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.142087 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.142209 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.142347 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.142523 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.142804 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.142911 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.143008 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" 
volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.143089 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.143281 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.143694 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.143791 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.144101 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.144467 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.144638 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.144772 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.144896 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.145023 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.145156 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" 
volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.145300 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.145537 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.145645 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.145755 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.145880 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.146086 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.146250 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.146376 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.146564 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.146733 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.146845 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" 
volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.146935 5070 manager.go:324] Recovery completed Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.147034 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.147154 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.147284 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.147408 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.147555 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.147747 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.147912 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.148040 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.148165 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.148277 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.148383 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual 
state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.149337 5070 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext="" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.149419 5070 reconstruct.go:97] "Volume reconstruction finished" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.149465 5070 reconciler.go:26] "Reconciler: start to sync state" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.161992 5070 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.162247 5070 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.164610 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.164659 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.164676 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.165254 5070 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv6" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.165577 5070 status_manager.go:217] "Starting to sync pod status with apiserver" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.165648 5070 kubelet.go:2335] "Starting kubelet main sync loop" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.165683 5070 cpu_manager.go:225] "Starting CPU manager" policy="none" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.165711 5070 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.165738 5070 state_mem.go:36] "Initialized new in-memory state store" Dec 13 03:11:48 crc kubenswrapper[5070]: E1213 03:11:48.165891 5070 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Dec 13 03:11:48 crc kubenswrapper[5070]: W1213 03:11:48.166687 5070 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.227:6443: connect: connection refused Dec 13 03:11:48 crc kubenswrapper[5070]: E1213 03:11:48.166788 5070 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.227:6443: connect: connection refused" logger="UnhandledError" Dec 13 03:11:48 crc kubenswrapper[5070]: E1213 03:11:48.202953 5070 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 13 03:11:48 crc kubenswrapper[5070]: E1213 03:11:48.267023 5070 
kubelet.go:2359] "Skipping pod synchronization" err="container runtime status check may not have completed yet" Dec 13 03:11:48 crc kubenswrapper[5070]: E1213 03:11:48.298973 5070 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.227:6443: connect: connection refused" interval="400ms" Dec 13 03:11:48 crc kubenswrapper[5070]: E1213 03:11:48.303095 5070 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 13 03:11:48 crc kubenswrapper[5070]: E1213 03:11:48.403242 5070 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 13 03:11:48 crc kubenswrapper[5070]: E1213 03:11:48.467943 5070 kubelet.go:2359] "Skipping pod synchronization" err="container runtime status check may not have completed yet" Dec 13 03:11:48 crc kubenswrapper[5070]: E1213 03:11:48.503859 5070 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 13 03:11:48 crc kubenswrapper[5070]: E1213 03:11:48.604508 5070 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 13 03:11:48 crc kubenswrapper[5070]: E1213 03:11:48.700407 5070 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.227:6443: connect: connection refused" interval="800ms" Dec 13 03:11:48 crc kubenswrapper[5070]: E1213 03:11:48.705471 5070 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.797897 5070 policy_none.go:49] "None policy: Start" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.799178 5070 memory_manager.go:170] "Starting memorymanager" policy="None" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.799225 5070 state_mem.go:35] "Initializing new in-memory state store" Dec 13 03:11:48 crc kubenswrapper[5070]: E1213 03:11:48.805940 5070 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 13 03:11:48 crc kubenswrapper[5070]: E1213 03:11:48.868811 5070 kubelet.go:2359] "Skipping pod synchronization" err="container runtime status check may not have completed yet" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.899892 5070 manager.go:334] "Starting Device Plugin manager" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.899963 5070 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.899978 5070 server.go:79] "Starting device plugin registration server" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.900569 5070 eviction_manager.go:189] "Eviction manager: starting control loop" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.900591 5070 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.900839 5070 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.900925 5070 plugin_manager.go:116] "The desired_state_of_world populator (plugin 
watcher) starts" Dec 13 03:11:48 crc kubenswrapper[5070]: I1213 03:11:48.900942 5070 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Dec 13 03:11:48 crc kubenswrapper[5070]: E1213 03:11:48.906931 5070 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Dec 13 03:11:49 crc kubenswrapper[5070]: I1213 03:11:49.001138 5070 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 13 03:11:49 crc kubenswrapper[5070]: I1213 03:11:49.002803 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:11:49 crc kubenswrapper[5070]: I1213 03:11:49.002905 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:11:49 crc kubenswrapper[5070]: I1213 03:11:49.002925 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:11:49 crc kubenswrapper[5070]: I1213 03:11:49.002970 5070 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 13 03:11:49 crc kubenswrapper[5070]: E1213 03:11:49.003783 5070 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.227:6443: connect: connection refused" node="crc" Dec 13 03:11:49 crc kubenswrapper[5070]: W1213 03:11:49.020957 5070 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.227:6443: connect: connection refused Dec 13 03:11:49 crc kubenswrapper[5070]: E1213 03:11:49.021079 5070 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.227:6443: connect: connection refused" logger="UnhandledError" Dec 13 03:11:49 crc kubenswrapper[5070]: W1213 03:11:49.021094 5070 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.227:6443: connect: connection refused Dec 13 03:11:49 crc kubenswrapper[5070]: E1213 03:11:49.021229 5070 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.227:6443: connect: connection refused" logger="UnhandledError" Dec 13 03:11:49 crc kubenswrapper[5070]: I1213 03:11:49.093400 5070 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.227:6443: connect: connection refused Dec 13 03:11:49 crc kubenswrapper[5070]: W1213 03:11:49.093390 5070 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.227:6443: connect: connection refused Dec 13 03:11:49 crc 
kubenswrapper[5070]: E1213 03:11:49.093557 5070 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.227:6443: connect: connection refused" logger="UnhandledError" Dec 13 03:11:49 crc kubenswrapper[5070]: I1213 03:11:49.096887 5070 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-26 05:20:36.145008805 +0000 UTC Dec 13 03:11:49 crc kubenswrapper[5070]: I1213 03:11:49.096941 5070 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 314h8m47.048070218s for next certificate rotation Dec 13 03:11:49 crc kubenswrapper[5070]: I1213 03:11:49.204513 5070 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 13 03:11:49 crc kubenswrapper[5070]: I1213 03:11:49.205738 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:11:49 crc kubenswrapper[5070]: I1213 03:11:49.205773 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:11:49 crc kubenswrapper[5070]: I1213 03:11:49.205786 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:11:49 crc kubenswrapper[5070]: I1213 03:11:49.205811 5070 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 13 03:11:49 crc kubenswrapper[5070]: E1213 03:11:49.206215 5070 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.227:6443: connect: connection refused" node="crc" Dec 13 03:11:49 crc kubenswrapper[5070]: E1213 03:11:49.501671 5070 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.227:6443: connect: connection refused" interval="1.6s" Dec 13 03:11:49 crc kubenswrapper[5070]: I1213 03:11:49.607148 5070 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 13 03:11:49 crc kubenswrapper[5070]: I1213 03:11:49.609560 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:11:49 crc kubenswrapper[5070]: I1213 03:11:49.609611 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:11:49 crc kubenswrapper[5070]: I1213 03:11:49.609627 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:11:49 crc kubenswrapper[5070]: I1213 03:11:49.609665 5070 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 13 03:11:49 crc kubenswrapper[5070]: E1213 03:11:49.610268 5070 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.227:6443: connect: connection refused" node="crc" Dec 13 03:11:49 crc kubenswrapper[5070]: W1213 03:11:49.640056 5070 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get 
"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.227:6443: connect: connection refused Dec 13 03:11:49 crc kubenswrapper[5070]: E1213 03:11:49.640124 5070 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.227:6443: connect: connection refused" logger="UnhandledError" Dec 13 03:11:49 crc kubenswrapper[5070]: I1213 03:11:49.669563 5070 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-etcd/etcd-crc","openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc","openshift-kube-scheduler/openshift-kube-scheduler-crc","openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Dec 13 03:11:49 crc kubenswrapper[5070]: I1213 03:11:49.669719 5070 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 13 03:11:49 crc kubenswrapper[5070]: I1213 03:11:49.671069 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:11:49 crc kubenswrapper[5070]: I1213 03:11:49.671104 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:11:49 crc kubenswrapper[5070]: I1213 03:11:49.671120 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:11:49 crc kubenswrapper[5070]: I1213 03:11:49.671305 5070 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 13 03:11:49 crc kubenswrapper[5070]: I1213 03:11:49.671641 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Dec 13 03:11:49 crc kubenswrapper[5070]: I1213 03:11:49.671779 5070 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 13 03:11:49 crc kubenswrapper[5070]: I1213 03:11:49.672026 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:11:49 crc kubenswrapper[5070]: I1213 03:11:49.672059 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:11:49 crc kubenswrapper[5070]: I1213 03:11:49.672072 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:11:49 crc kubenswrapper[5070]: I1213 03:11:49.672237 5070 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 13 03:11:49 crc kubenswrapper[5070]: I1213 03:11:49.672395 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 13 03:11:49 crc kubenswrapper[5070]: I1213 03:11:49.672466 5070 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 13 03:11:49 crc kubenswrapper[5070]: I1213 03:11:49.672735 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:11:49 crc kubenswrapper[5070]: I1213 03:11:49.672766 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:11:49 crc kubenswrapper[5070]: I1213 03:11:49.672778 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:11:49 crc kubenswrapper[5070]: I1213 03:11:49.673225 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:11:49 crc kubenswrapper[5070]: I1213 03:11:49.673254 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:11:49 crc kubenswrapper[5070]: I1213 03:11:49.673266 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:11:49 crc kubenswrapper[5070]: I1213 03:11:49.673372 5070 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 13 03:11:49 crc kubenswrapper[5070]: I1213 03:11:49.673488 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 13 03:11:49 crc kubenswrapper[5070]: I1213 03:11:49.673538 5070 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 13 03:11:49 crc kubenswrapper[5070]: I1213 03:11:49.674053 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:11:49 crc kubenswrapper[5070]: I1213 03:11:49.674091 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:11:49 crc kubenswrapper[5070]: I1213 03:11:49.674050 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:11:49 crc kubenswrapper[5070]: I1213 03:11:49.674129 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:11:49 crc kubenswrapper[5070]: I1213 03:11:49.674142 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:11:49 crc kubenswrapper[5070]: I1213 03:11:49.674104 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:11:49 crc kubenswrapper[5070]: I1213 03:11:49.674351 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:11:49 crc kubenswrapper[5070]: I1213 03:11:49.674369 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:11:49 crc kubenswrapper[5070]: I1213 03:11:49.674378 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:11:49 crc kubenswrapper[5070]: I1213 03:11:49.674406 5070 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 13 03:11:49 crc 
kubenswrapper[5070]: I1213 03:11:49.674501 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 13 03:11:49 crc kubenswrapper[5070]: I1213 03:11:49.674530 5070 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 13 03:11:49 crc kubenswrapper[5070]: I1213 03:11:49.675423 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:11:49 crc kubenswrapper[5070]: I1213 03:11:49.675467 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:11:49 crc kubenswrapper[5070]: I1213 03:11:49.675486 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:11:49 crc kubenswrapper[5070]: I1213 03:11:49.675501 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:11:49 crc kubenswrapper[5070]: I1213 03:11:49.675521 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:11:49 crc kubenswrapper[5070]: I1213 03:11:49.675529 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:11:49 crc kubenswrapper[5070]: I1213 03:11:49.675626 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 13 03:11:49 crc kubenswrapper[5070]: I1213 03:11:49.675649 5070 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 13 03:11:49 crc kubenswrapper[5070]: I1213 03:11:49.676491 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:11:49 crc kubenswrapper[5070]: I1213 03:11:49.676526 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:11:49 crc kubenswrapper[5070]: I1213 03:11:49.676539 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:11:49 crc kubenswrapper[5070]: I1213 03:11:49.770275 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 13 03:11:49 crc kubenswrapper[5070]: I1213 03:11:49.770313 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 13 03:11:49 crc kubenswrapper[5070]: I1213 03:11:49.770334 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 13 03:11:49 crc kubenswrapper[5070]: I1213 03:11:49.770351 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 13 03:11:49 crc kubenswrapper[5070]: I1213 03:11:49.770368 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 13 03:11:49 crc kubenswrapper[5070]: I1213 03:11:49.770386 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 13 03:11:49 crc kubenswrapper[5070]: I1213 03:11:49.770402 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 13 03:11:49 crc kubenswrapper[5070]: I1213 03:11:49.770418 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 13 03:11:49 crc kubenswrapper[5070]: I1213 03:11:49.770434 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 13 03:11:49 crc kubenswrapper[5070]: I1213 03:11:49.770526 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 13 03:11:49 crc kubenswrapper[5070]: I1213 03:11:49.770683 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 13 03:11:49 crc kubenswrapper[5070]: I1213 03:11:49.770744 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 13 03:11:49 crc kubenswrapper[5070]: I1213 03:11:49.770839 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: 
\"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 13 03:11:49 crc kubenswrapper[5070]: I1213 03:11:49.770904 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 13 03:11:49 crc kubenswrapper[5070]: I1213 03:11:49.770970 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 13 03:11:49 crc kubenswrapper[5070]: I1213 03:11:49.872492 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 13 03:11:49 crc kubenswrapper[5070]: I1213 03:11:49.872571 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 13 03:11:49 crc kubenswrapper[5070]: I1213 03:11:49.872590 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 13 03:11:49 crc kubenswrapper[5070]: I1213 03:11:49.872609 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 13 03:11:49 crc kubenswrapper[5070]: I1213 03:11:49.872626 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 13 03:11:49 crc kubenswrapper[5070]: I1213 03:11:49.872649 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 13 03:11:49 crc kubenswrapper[5070]: I1213 03:11:49.872665 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 13 03:11:49 crc kubenswrapper[5070]: I1213 03:11:49.872682 5070 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 13 03:11:49 crc kubenswrapper[5070]: I1213 03:11:49.872700 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 13 03:11:49 crc kubenswrapper[5070]: I1213 03:11:49.872719 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 13 03:11:49 crc kubenswrapper[5070]: I1213 03:11:49.872737 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 13 03:11:49 crc kubenswrapper[5070]: I1213 03:11:49.872753 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 13 03:11:49 crc kubenswrapper[5070]: I1213 03:11:49.872783 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 13 03:11:49 crc kubenswrapper[5070]: I1213 03:11:49.872837 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 13 03:11:49 crc kubenswrapper[5070]: I1213 03:11:49.872858 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 13 03:11:49 crc kubenswrapper[5070]: I1213 03:11:49.872851 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 13 03:11:49 crc kubenswrapper[5070]: I1213 03:11:49.872888 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " 
pod="openshift-etcd/etcd-crc" Dec 13 03:11:49 crc kubenswrapper[5070]: I1213 03:11:49.873108 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 13 03:11:49 crc kubenswrapper[5070]: I1213 03:11:49.873142 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 13 03:11:49 crc kubenswrapper[5070]: I1213 03:11:49.873177 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 13 03:11:49 crc kubenswrapper[5070]: I1213 03:11:49.873177 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 13 03:11:49 crc kubenswrapper[5070]: I1213 03:11:49.873199 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 13 03:11:49 crc kubenswrapper[5070]: I1213 03:11:49.873224 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 13 03:11:49 crc kubenswrapper[5070]: I1213 03:11:49.873231 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 13 03:11:49 crc kubenswrapper[5070]: I1213 03:11:49.873266 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 13 03:11:49 crc kubenswrapper[5070]: I1213 03:11:49.873263 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 13 03:11:49 crc kubenswrapper[5070]: I1213 03:11:49.873257 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: 
\"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 13 03:11:49 crc kubenswrapper[5070]: I1213 03:11:49.873320 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 13 03:11:49 crc kubenswrapper[5070]: I1213 03:11:49.873275 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 13 03:11:49 crc kubenswrapper[5070]: I1213 03:11:49.873101 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 13 03:11:50 crc kubenswrapper[5070]: I1213 03:11:50.012395 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Dec 13 03:11:50 crc kubenswrapper[5070]: I1213 03:11:50.030393 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 13 03:11:50 crc kubenswrapper[5070]: I1213 03:11:50.043183 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 13 03:11:50 crc kubenswrapper[5070]: W1213 03:11:50.046122 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-f5777aedeb100a4018f6619f2831fc4f6faaf3028bb4298280f006df8649f097 WatchSource:0}: Error finding container f5777aedeb100a4018f6619f2831fc4f6faaf3028bb4298280f006df8649f097: Status 404 returned error can't find the container with id f5777aedeb100a4018f6619f2831fc4f6faaf3028bb4298280f006df8649f097 Dec 13 03:11:50 crc kubenswrapper[5070]: I1213 03:11:50.065482 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 13 03:11:50 crc kubenswrapper[5070]: W1213 03:11:50.067332 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf614b9022728cf315e60c057852e563e.slice/crio-5f8f34fe69df38a834d6a7cb0eecf6e8f46ac6412f909fab3653af3e4441fe8e WatchSource:0}: Error finding container 5f8f34fe69df38a834d6a7cb0eecf6e8f46ac6412f909fab3653af3e4441fe8e: Status 404 returned error can't find the container with id 5f8f34fe69df38a834d6a7cb0eecf6e8f46ac6412f909fab3653af3e4441fe8e Dec 13 03:11:50 crc kubenswrapper[5070]: I1213 03:11:50.073862 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 13 03:11:50 crc kubenswrapper[5070]: W1213 03:11:50.089083 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dcd261975c3d6b9a6ad6367fd4facd3.slice/crio-da10927a0747be6841634210b174c21b27ea41a88cfa32079ca725f3c5353f45 WatchSource:0}: Error finding container da10927a0747be6841634210b174c21b27ea41a88cfa32079ca725f3c5353f45: Status 404 returned error can't find the container with id da10927a0747be6841634210b174c21b27ea41a88cfa32079ca725f3c5353f45 Dec 13 03:11:50 crc kubenswrapper[5070]: W1213 03:11:50.091653 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-8616bc383d273d0b7c9cc8c528c7654a71b005f26a1f2d591c2ad5452811dcb1 WatchSource:0}: Error finding container 8616bc383d273d0b7c9cc8c528c7654a71b005f26a1f2d591c2ad5452811dcb1: Status 404 returned error can't find the container with id 8616bc383d273d0b7c9cc8c528c7654a71b005f26a1f2d591c2ad5452811dcb1 Dec 13 03:11:50 crc kubenswrapper[5070]: I1213 03:11:50.092273 5070 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.227:6443: connect: connection refused Dec 13 03:11:50 crc kubenswrapper[5070]: I1213 03:11:50.172942 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"da10927a0747be6841634210b174c21b27ea41a88cfa32079ca725f3c5353f45"} Dec 13 03:11:50 crc kubenswrapper[5070]: I1213 03:11:50.174302 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"5f8f34fe69df38a834d6a7cb0eecf6e8f46ac6412f909fab3653af3e4441fe8e"} Dec 13 03:11:50 crc kubenswrapper[5070]: I1213 03:11:50.175167 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"9a4df80d408ee22873b8ec32d3c0b738f1077e08450cadf8da93ed9fceb9477d"} Dec 13 03:11:50 crc kubenswrapper[5070]: I1213 03:11:50.176056 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"f5777aedeb100a4018f6619f2831fc4f6faaf3028bb4298280f006df8649f097"} Dec 13 03:11:50 crc kubenswrapper[5070]: I1213 03:11:50.178087 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"8616bc383d273d0b7c9cc8c528c7654a71b005f26a1f2d591c2ad5452811dcb1"} Dec 13 03:11:50 crc kubenswrapper[5070]: I1213 03:11:50.410573 5070 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 13 03:11:50 crc kubenswrapper[5070]: I1213 03:11:50.411680 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:11:50 crc kubenswrapper[5070]: I1213 03:11:50.411721 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 13 03:11:50 crc kubenswrapper[5070]: I1213 03:11:50.411734 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:11:50 crc kubenswrapper[5070]: I1213 03:11:50.411765 5070 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 13 03:11:50 crc kubenswrapper[5070]: E1213 03:11:50.412391 5070 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.227:6443: connect: connection refused" node="crc" Dec 13 03:11:51 crc kubenswrapper[5070]: I1213 03:11:51.094931 5070 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.227:6443: connect: connection refused Dec 13 03:11:51 crc kubenswrapper[5070]: E1213 03:11:51.102793 5070 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.227:6443: connect: connection refused" interval="3.2s" Dec 13 03:11:51 crc kubenswrapper[5070]: I1213 03:11:51.186071 5070 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="7109bc65c6c19a1a3a5e88f3b4b9f8815aa08a2157a8d957f4909422ed29f400" exitCode=0 Dec 13 03:11:51 crc kubenswrapper[5070]: I1213 03:11:51.186278 5070 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 13 03:11:51 crc kubenswrapper[5070]: I1213 03:11:51.186490 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"7109bc65c6c19a1a3a5e88f3b4b9f8815aa08a2157a8d957f4909422ed29f400"} Dec 13 03:11:51 crc kubenswrapper[5070]: I1213 03:11:51.192727 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:11:51 crc kubenswrapper[5070]: I1213 03:11:51.192767 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:11:51 crc kubenswrapper[5070]: I1213 03:11:51.192780 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:11:51 crc kubenswrapper[5070]: I1213 03:11:51.194570 5070 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="80fbcd6ed6bda77c9112b2d8f8e63a629a727b5f84e7a79dd7745283830a5cca" exitCode=0 Dec 13 03:11:51 crc kubenswrapper[5070]: I1213 03:11:51.194639 5070 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 13 03:11:51 crc kubenswrapper[5070]: I1213 03:11:51.194721 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"80fbcd6ed6bda77c9112b2d8f8e63a629a727b5f84e7a79dd7745283830a5cca"} Dec 13 03:11:51 crc kubenswrapper[5070]: I1213 03:11:51.195531 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:11:51 crc kubenswrapper[5070]: I1213 03:11:51.195563 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" 
Dec 13 03:11:51 crc kubenswrapper[5070]: I1213 03:11:51.195574 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:11:51 crc kubenswrapper[5070]: I1213 03:11:51.197525 5070 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="a0212b04d81c7d20fdcacfbfa8757f2a7c62a8a65d02ecb92e10335481d00013" exitCode=0 Dec 13 03:11:51 crc kubenswrapper[5070]: I1213 03:11:51.197639 5070 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 13 03:11:51 crc kubenswrapper[5070]: I1213 03:11:51.197618 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"a0212b04d81c7d20fdcacfbfa8757f2a7c62a8a65d02ecb92e10335481d00013"} Dec 13 03:11:51 crc kubenswrapper[5070]: I1213 03:11:51.198828 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:11:51 crc kubenswrapper[5070]: I1213 03:11:51.198852 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:11:51 crc kubenswrapper[5070]: I1213 03:11:51.198863 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:11:51 crc kubenswrapper[5070]: I1213 03:11:51.200050 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"3a598d49ebe10cda296cbb779c7bd5cf606e827af564ac0ae813eb7ab0cfca9b"} Dec 13 03:11:51 crc kubenswrapper[5070]: I1213 03:11:51.200072 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"32c7d4f0ad60bf4cac9e70945fd023c2bbe79250fac7118c090d735ad2965c6d"} Dec 13 03:11:51 crc kubenswrapper[5070]: I1213 03:11:51.201586 5070 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="6fe1de36954b63d9bc0b059085237ea841127d94254ef6b5a2a7418077c3e367" exitCode=0 Dec 13 03:11:51 crc kubenswrapper[5070]: I1213 03:11:51.201634 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"6fe1de36954b63d9bc0b059085237ea841127d94254ef6b5a2a7418077c3e367"} Dec 13 03:11:51 crc kubenswrapper[5070]: I1213 03:11:51.201715 5070 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 13 03:11:51 crc kubenswrapper[5070]: I1213 03:11:51.202889 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:11:51 crc kubenswrapper[5070]: I1213 03:11:51.202922 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:11:51 crc kubenswrapper[5070]: I1213 03:11:51.202933 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:11:51 crc kubenswrapper[5070]: I1213 03:11:51.214041 5070 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 13 03:11:51 crc kubenswrapper[5070]: I1213 
03:11:51.216878 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:11:51 crc kubenswrapper[5070]: I1213 03:11:51.216922 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:11:51 crc kubenswrapper[5070]: I1213 03:11:51.216941 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:11:51 crc kubenswrapper[5070]: W1213 03:11:51.495030 5070 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.227:6443: connect: connection refused Dec 13 03:11:51 crc kubenswrapper[5070]: E1213 03:11:51.495175 5070 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.227:6443: connect: connection refused" logger="UnhandledError" Dec 13 03:11:51 crc kubenswrapper[5070]: W1213 03:11:51.540807 5070 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.227:6443: connect: connection refused Dec 13 03:11:51 crc kubenswrapper[5070]: E1213 03:11:51.540908 5070 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.227:6443: connect: connection refused" logger="UnhandledError" Dec 13 03:11:51 crc kubenswrapper[5070]: W1213 03:11:51.854956 5070 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.227:6443: connect: connection refused Dec 13 03:11:51 crc kubenswrapper[5070]: E1213 03:11:51.855052 5070 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.227:6443: connect: connection refused" logger="UnhandledError" Dec 13 03:11:52 crc kubenswrapper[5070]: I1213 03:11:52.012793 5070 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 13 03:11:52 crc kubenswrapper[5070]: I1213 03:11:52.014727 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:11:52 crc kubenswrapper[5070]: I1213 03:11:52.014791 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:11:52 crc kubenswrapper[5070]: I1213 03:11:52.014801 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:11:52 crc kubenswrapper[5070]: I1213 03:11:52.014828 5070 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 13 03:11:52 crc kubenswrapper[5070]: E1213 03:11:52.015421 5070 
kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.227:6443: connect: connection refused" node="crc" Dec 13 03:11:52 crc kubenswrapper[5070]: I1213 03:11:52.092812 5070 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.227:6443: connect: connection refused Dec 13 03:11:52 crc kubenswrapper[5070]: I1213 03:11:52.206491 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"43350ca69ace891078b304f722e18e3bb897227db032f20bc060516caebf72fe"} Dec 13 03:11:52 crc kubenswrapper[5070]: I1213 03:11:52.208730 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"7c109d3244fa0324a8388b5f433ae9d5eb4a8519b6abc901f9fb91284885b605"} Dec 13 03:11:52 crc kubenswrapper[5070]: I1213 03:11:52.211738 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"6645193c794cab584d8b627d0a55cd3682c03d93e98565dab28c5a38c8c06b69"} Dec 13 03:11:52 crc kubenswrapper[5070]: I1213 03:11:52.213935 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"2692bd4b1be09ff4ea1ab524ba9f9d1f104c8a7d11b1b9b27ac68e5b169d8e19"} Dec 13 03:11:52 crc kubenswrapper[5070]: I1213 03:11:52.215983 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"2bd9c6c98eba31d8faf9e65711fd66a292ad0a74e9103142982cdb0bb5232a3b"} Dec 13 03:11:52 crc kubenswrapper[5070]: W1213 03:11:52.304987 5070 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.227:6443: connect: connection refused Dec 13 03:11:52 crc kubenswrapper[5070]: E1213 03:11:52.305419 5070 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.227:6443: connect: connection refused" logger="UnhandledError" Dec 13 03:11:53 crc kubenswrapper[5070]: E1213 03:11:53.072565 5070 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.227:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.1880a7c294d5b3f5 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-13 03:11:48.091761653 +0000 UTC m=+0.327605239,LastTimestamp:2025-12-13 03:11:48.091761653 +0000 UTC 
m=+0.327605239,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 13 03:11:53 crc kubenswrapper[5070]: I1213 03:11:53.093231 5070 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.227:6443: connect: connection refused Dec 13 03:11:53 crc kubenswrapper[5070]: I1213 03:11:53.221604 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"ae3ccc8e4e5fc8cf1bdd87599e40edb02236a05c8698aa097a4181020f60da1a"} Dec 13 03:11:53 crc kubenswrapper[5070]: I1213 03:11:53.221693 5070 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 13 03:11:53 crc kubenswrapper[5070]: I1213 03:11:53.222958 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:11:53 crc kubenswrapper[5070]: I1213 03:11:53.223005 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:11:53 crc kubenswrapper[5070]: I1213 03:11:53.223017 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:11:53 crc kubenswrapper[5070]: I1213 03:11:53.224394 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"57c12f12bfa97cb3476d9af993bc99bdd178d8d7a2f3d92bac86c096d2729c8f"} Dec 13 03:11:53 crc kubenswrapper[5070]: I1213 03:11:53.226582 5070 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="6645193c794cab584d8b627d0a55cd3682c03d93e98565dab28c5a38c8c06b69" exitCode=0 Dec 13 03:11:53 crc kubenswrapper[5070]: I1213 03:11:53.226674 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"6645193c794cab584d8b627d0a55cd3682c03d93e98565dab28c5a38c8c06b69"} Dec 13 03:11:53 crc kubenswrapper[5070]: I1213 03:11:53.226703 5070 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 13 03:11:53 crc kubenswrapper[5070]: I1213 03:11:53.226800 5070 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 13 03:11:53 crc kubenswrapper[5070]: I1213 03:11:53.227535 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:11:53 crc kubenswrapper[5070]: I1213 03:11:53.227568 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:11:53 crc kubenswrapper[5070]: I1213 03:11:53.227578 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:11:53 crc kubenswrapper[5070]: I1213 03:11:53.228250 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:11:53 crc kubenswrapper[5070]: I1213 03:11:53.228301 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:11:53 crc 
kubenswrapper[5070]: I1213 03:11:53.228318 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:11:54 crc kubenswrapper[5070]: I1213 03:11:54.231803 5070 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 13 03:11:54 crc kubenswrapper[5070]: I1213 03:11:54.232366 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"d49b9f0284ab42bf1b27888589c4ddf79ea1d4278f7d1454767a2cf967557769"} Dec 13 03:11:54 crc kubenswrapper[5070]: I1213 03:11:54.233092 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:11:54 crc kubenswrapper[5070]: I1213 03:11:54.233139 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:11:54 crc kubenswrapper[5070]: I1213 03:11:54.233151 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:11:55 crc kubenswrapper[5070]: I1213 03:11:55.216277 5070 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 13 03:11:55 crc kubenswrapper[5070]: I1213 03:11:55.217879 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:11:55 crc kubenswrapper[5070]: I1213 03:11:55.217930 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:11:55 crc kubenswrapper[5070]: I1213 03:11:55.217950 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:11:55 crc kubenswrapper[5070]: I1213 03:11:55.217985 5070 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 13 03:11:55 crc kubenswrapper[5070]: I1213 03:11:55.238410 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"019edfef910bf347c8599283900165320b26343b749f9eec254fc1fd392f903c"} Dec 13 03:11:55 crc kubenswrapper[5070]: I1213 03:11:55.361330 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 13 03:11:55 crc kubenswrapper[5070]: I1213 03:11:55.361633 5070 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 13 03:11:55 crc kubenswrapper[5070]: I1213 03:11:55.363363 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:11:55 crc kubenswrapper[5070]: I1213 03:11:55.363413 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:11:55 crc kubenswrapper[5070]: I1213 03:11:55.363430 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:11:56 crc kubenswrapper[5070]: I1213 03:11:56.245645 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"5e5773c7f877f9c94ea245e380549100a64f7cf19848e78a97803ddc707f0051"} Dec 13 03:11:56 crc kubenswrapper[5070]: I1213 03:11:56.249883 5070 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"7dfbc973291e3f829acc09c1dc5e465454ba0dc5ab088fc08211a86f51880fba"} Dec 13 03:11:56 crc kubenswrapper[5070]: I1213 03:11:56.252537 5070 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="019edfef910bf347c8599283900165320b26343b749f9eec254fc1fd392f903c" exitCode=0 Dec 13 03:11:56 crc kubenswrapper[5070]: I1213 03:11:56.252581 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"019edfef910bf347c8599283900165320b26343b749f9eec254fc1fd392f903c"} Dec 13 03:11:56 crc kubenswrapper[5070]: I1213 03:11:56.252740 5070 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 13 03:11:56 crc kubenswrapper[5070]: I1213 03:11:56.254020 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:11:56 crc kubenswrapper[5070]: I1213 03:11:56.254051 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:11:56 crc kubenswrapper[5070]: I1213 03:11:56.254064 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:11:58 crc kubenswrapper[5070]: I1213 03:11:58.261616 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"cd6cdb068faf5f763aec54b08a8597572a72fbc5685b112558c444fda675eeb1"} Dec 13 03:11:58 crc kubenswrapper[5070]: I1213 03:11:58.261660 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"4317a72f422d158e2d22658fd3601f8f2da155e957551144bc153bcaca1f4673"} Dec 13 03:11:58 crc kubenswrapper[5070]: I1213 03:11:58.261710 5070 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 13 03:11:58 crc kubenswrapper[5070]: I1213 03:11:58.262570 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:11:58 crc kubenswrapper[5070]: I1213 03:11:58.262609 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:11:58 crc kubenswrapper[5070]: I1213 03:11:58.262621 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:11:58 crc kubenswrapper[5070]: I1213 03:11:58.264517 5070 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 13 03:11:58 crc kubenswrapper[5070]: I1213 03:11:58.264489 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"52c1ab07edb249d0355bfaadc3520f19c660857a965958b159d760f538c6367d"} Dec 13 03:11:58 crc kubenswrapper[5070]: I1213 03:11:58.264704 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"d329fa487c154d7b113b153f42f1cdcbe0bd73f814e5791818b04d8a119b1c91"} Dec 13 
03:11:58 crc kubenswrapper[5070]: I1213 03:11:58.265291 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:11:58 crc kubenswrapper[5070]: I1213 03:11:58.265320 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:11:58 crc kubenswrapper[5070]: I1213 03:11:58.265330 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:11:58 crc kubenswrapper[5070]: I1213 03:11:58.646765 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 13 03:11:58 crc kubenswrapper[5070]: I1213 03:11:58.647033 5070 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 13 03:11:58 crc kubenswrapper[5070]: I1213 03:11:58.648849 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:11:58 crc kubenswrapper[5070]: I1213 03:11:58.648953 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:11:58 crc kubenswrapper[5070]: I1213 03:11:58.648969 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:11:58 crc kubenswrapper[5070]: I1213 03:11:58.651957 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 13 03:11:58 crc kubenswrapper[5070]: E1213 03:11:58.907237 5070 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Dec 13 03:11:59 crc kubenswrapper[5070]: I1213 03:11:59.271194 5070 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 13 03:11:59 crc kubenswrapper[5070]: I1213 03:11:59.271244 5070 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 13 03:11:59 crc kubenswrapper[5070]: I1213 03:11:59.271716 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"784215c27fe4c6494362b10af5542c7031f301597de10045913cabdbceee1bfa"} Dec 13 03:11:59 crc kubenswrapper[5070]: I1213 03:11:59.271752 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"ba25ae28e5110c171bc54b323b790250a7f5a1fc8f34a05c8deae0ae703552f3"} Dec 13 03:11:59 crc kubenswrapper[5070]: I1213 03:11:59.271812 5070 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 13 03:11:59 crc kubenswrapper[5070]: I1213 03:11:59.272417 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:11:59 crc kubenswrapper[5070]: I1213 03:11:59.272464 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:11:59 crc kubenswrapper[5070]: I1213 03:11:59.272474 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:11:59 crc kubenswrapper[5070]: I1213 03:11:59.273069 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 
13 03:11:59 crc kubenswrapper[5070]: I1213 03:11:59.273088 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:11:59 crc kubenswrapper[5070]: I1213 03:11:59.273095 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:11:59 crc kubenswrapper[5070]: I1213 03:11:59.548614 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 13 03:11:59 crc kubenswrapper[5070]: I1213 03:11:59.548994 5070 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 13 03:11:59 crc kubenswrapper[5070]: I1213 03:11:59.551301 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:11:59 crc kubenswrapper[5070]: I1213 03:11:59.551355 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:11:59 crc kubenswrapper[5070]: I1213 03:11:59.551368 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:11:59 crc kubenswrapper[5070]: I1213 03:11:59.628545 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 13 03:11:59 crc kubenswrapper[5070]: I1213 03:11:59.770075 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 13 03:12:00 crc kubenswrapper[5070]: I1213 03:12:00.001890 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 13 03:12:00 crc kubenswrapper[5070]: I1213 03:12:00.279021 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"b0c0c137172a25cb0b4fefe9c586e6630dcf37eb0cdcc72e94ca58dcf6f35391"} Dec 13 03:12:00 crc kubenswrapper[5070]: I1213 03:12:00.279128 5070 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 13 03:12:00 crc kubenswrapper[5070]: I1213 03:12:00.279156 5070 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 13 03:12:00 crc kubenswrapper[5070]: I1213 03:12:00.279342 5070 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 13 03:12:00 crc kubenswrapper[5070]: I1213 03:12:00.280562 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:12:00 crc kubenswrapper[5070]: I1213 03:12:00.280588 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:12:00 crc kubenswrapper[5070]: I1213 03:12:00.280598 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:12:00 crc kubenswrapper[5070]: I1213 03:12:00.280568 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:12:00 crc kubenswrapper[5070]: I1213 03:12:00.280676 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:12:00 crc kubenswrapper[5070]: I1213 03:12:00.280685 5070 kubelet_node_status.go:724] "Recording event 
message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:12:00 crc kubenswrapper[5070]: I1213 03:12:00.281290 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:12:00 crc kubenswrapper[5070]: I1213 03:12:00.281355 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:12:00 crc kubenswrapper[5070]: I1213 03:12:00.281368 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:12:00 crc kubenswrapper[5070]: I1213 03:12:00.286829 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 13 03:12:01 crc kubenswrapper[5070]: I1213 03:12:01.060277 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 13 03:12:01 crc kubenswrapper[5070]: I1213 03:12:01.280776 5070 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 13 03:12:01 crc kubenswrapper[5070]: I1213 03:12:01.280855 5070 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 13 03:12:01 crc kubenswrapper[5070]: I1213 03:12:01.280897 5070 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 13 03:12:01 crc kubenswrapper[5070]: I1213 03:12:01.282592 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:12:01 crc kubenswrapper[5070]: I1213 03:12:01.282624 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:12:01 crc kubenswrapper[5070]: I1213 03:12:01.282637 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:12:01 crc kubenswrapper[5070]: I1213 03:12:01.282720 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:12:01 crc kubenswrapper[5070]: I1213 03:12:01.282754 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:12:01 crc kubenswrapper[5070]: I1213 03:12:01.282766 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:12:01 crc kubenswrapper[5070]: I1213 03:12:01.283168 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:12:01 crc kubenswrapper[5070]: I1213 03:12:01.283200 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:12:01 crc kubenswrapper[5070]: I1213 03:12:01.283207 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:12:01 crc kubenswrapper[5070]: I1213 03:12:01.427860 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc" Dec 13 03:12:02 crc kubenswrapper[5070]: I1213 03:12:02.283098 5070 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 13 03:12:02 crc kubenswrapper[5070]: I1213 03:12:02.283258 5070 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 13 03:12:02 crc kubenswrapper[5070]: I1213 
03:12:02.284700 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:12:02 crc kubenswrapper[5070]: I1213 03:12:02.284771 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:12:02 crc kubenswrapper[5070]: I1213 03:12:02.284823 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:12:02 crc kubenswrapper[5070]: I1213 03:12:02.284872 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:12:02 crc kubenswrapper[5070]: I1213 03:12:02.284909 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:12:02 crc kubenswrapper[5070]: I1213 03:12:02.284922 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:12:02 crc kubenswrapper[5070]: I1213 03:12:02.324074 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 13 03:12:02 crc kubenswrapper[5070]: I1213 03:12:02.324327 5070 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 13 03:12:02 crc kubenswrapper[5070]: I1213 03:12:02.325981 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:12:02 crc kubenswrapper[5070]: I1213 03:12:02.326046 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:12:02 crc kubenswrapper[5070]: I1213 03:12:02.326066 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:12:02 crc kubenswrapper[5070]: I1213 03:12:02.771151 5070 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 13 03:12:02 crc kubenswrapper[5070]: I1213 03:12:02.771274 5070 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 13 03:12:04 crc kubenswrapper[5070]: I1213 03:12:04.094048 5070 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": net/http: TLS handshake timeout Dec 13 03:12:04 crc kubenswrapper[5070]: E1213 03:12:04.303609 5070 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" interval="6.4s" Dec 13 03:12:04 crc kubenswrapper[5070]: I1213 03:12:04.457578 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc" Dec 13 03:12:04 crc kubenswrapper[5070]: I1213 
03:12:04.457804 5070 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 13 03:12:04 crc kubenswrapper[5070]: I1213 03:12:04.459327 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:12:04 crc kubenswrapper[5070]: I1213 03:12:04.459371 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:12:04 crc kubenswrapper[5070]: I1213 03:12:04.459387 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:12:05 crc kubenswrapper[5070]: E1213 03:12:05.219655 5070 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": net/http: TLS handshake timeout" node="crc" Dec 13 03:12:05 crc kubenswrapper[5070]: W1213 03:12:05.390267 5070 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": net/http: TLS handshake timeout Dec 13 03:12:05 crc kubenswrapper[5070]: I1213 03:12:05.390409 5070 trace.go:236] Trace[277314463]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (13-Dec-2025 03:11:55.387) (total time: 10002ms): Dec 13 03:12:05 crc kubenswrapper[5070]: Trace[277314463]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": net/http: TLS handshake timeout 10002ms (03:12:05.390) Dec 13 03:12:05 crc kubenswrapper[5070]: Trace[277314463]: [10.002516631s] [10.002516631s] END Dec 13 03:12:05 crc kubenswrapper[5070]: E1213 03:12:05.390501 5070 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError" Dec 13 03:12:05 crc kubenswrapper[5070]: W1213 03:12:05.641484 5070 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": net/http: TLS handshake timeout Dec 13 03:12:05 crc kubenswrapper[5070]: I1213 03:12:05.641643 5070 trace.go:236] Trace[516445748]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (13-Dec-2025 03:11:55.639) (total time: 10001ms): Dec 13 03:12:05 crc kubenswrapper[5070]: Trace[516445748]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": net/http: TLS handshake timeout 10001ms (03:12:05.641) Dec 13 03:12:05 crc kubenswrapper[5070]: Trace[516445748]: [10.001677083s] [10.001677083s] END Dec 13 03:12:05 crc kubenswrapper[5070]: E1213 03:12:05.641680 5070 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError" Dec 13 03:12:06 crc kubenswrapper[5070]: W1213 03:12:06.450245 5070 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get 
"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": net/http: TLS handshake timeout Dec 13 03:12:06 crc kubenswrapper[5070]: I1213 03:12:06.450846 5070 trace.go:236] Trace[1257030006]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (13-Dec-2025 03:11:56.448) (total time: 10002ms): Dec 13 03:12:06 crc kubenswrapper[5070]: Trace[1257030006]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": net/http: TLS handshake timeout 10001ms (03:12:06.450) Dec 13 03:12:06 crc kubenswrapper[5070]: Trace[1257030006]: [10.002487295s] [10.002487295s] END Dec 13 03:12:06 crc kubenswrapper[5070]: E1213 03:12:06.450886 5070 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError" Dec 13 03:12:08 crc kubenswrapper[5070]: W1213 03:12:08.193744 5070 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": net/http: TLS handshake timeout Dec 13 03:12:08 crc kubenswrapper[5070]: I1213 03:12:08.193863 5070 trace.go:236] Trace[472871391]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (13-Dec-2025 03:11:58.192) (total time: 10001ms): Dec 13 03:12:08 crc kubenswrapper[5070]: Trace[472871391]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": net/http: TLS handshake timeout 10001ms (03:12:08.193) Dec 13 03:12:08 crc kubenswrapper[5070]: Trace[472871391]: [10.001576506s] [10.001576506s] END Dec 13 03:12:08 crc kubenswrapper[5070]: E1213 03:12:08.193890 5070 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError" Dec 13 03:12:08 crc kubenswrapper[5070]: E1213 03:12:08.908113 5070 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Dec 13 03:12:09 crc kubenswrapper[5070]: I1213 03:12:09.629566 5070 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="Get \"https://192.168.126.11:6443/livez\": context deadline exceeded" start-of-body= Dec 13 03:12:09 crc kubenswrapper[5070]: I1213 03:12:09.629663 5070 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="Get \"https://192.168.126.11:6443/livez\": context deadline exceeded" Dec 13 03:12:11 crc kubenswrapper[5070]: I1213 03:12:11.357279 5070 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" 
start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Dec 13 03:12:11 crc kubenswrapper[5070]: I1213 03:12:11.357587 5070 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Dec 13 03:12:11 crc kubenswrapper[5070]: I1213 03:12:11.620418 5070 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 13 03:12:11 crc kubenswrapper[5070]: I1213 03:12:11.621927 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:12:11 crc kubenswrapper[5070]: I1213 03:12:11.621993 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:12:11 crc kubenswrapper[5070]: I1213 03:12:11.622003 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:12:11 crc kubenswrapper[5070]: I1213 03:12:11.622040 5070 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 13 03:12:12 crc kubenswrapper[5070]: I1213 03:12:12.771867 5070 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 13 03:12:12 crc kubenswrapper[5070]: I1213 03:12:12.771976 5070 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Dec 13 03:12:14 crc kubenswrapper[5070]: I1213 03:12:14.494093 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc" Dec 13 03:12:14 crc kubenswrapper[5070]: I1213 03:12:14.494363 5070 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 13 03:12:14 crc kubenswrapper[5070]: I1213 03:12:14.496108 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:12:14 crc kubenswrapper[5070]: I1213 03:12:14.496172 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:12:14 crc kubenswrapper[5070]: I1213 03:12:14.496189 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:12:14 crc kubenswrapper[5070]: I1213 03:12:14.510875 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc" Dec 13 03:12:14 crc kubenswrapper[5070]: I1213 03:12:14.636794 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 13 03:12:14 crc kubenswrapper[5070]: I1213 03:12:14.637030 5070 kubelet_node_status.go:401] "Setting node annotation to enable volume controller 
attach/detach" Dec 13 03:12:14 crc kubenswrapper[5070]: I1213 03:12:14.638654 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:12:14 crc kubenswrapper[5070]: I1213 03:12:14.638710 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:12:14 crc kubenswrapper[5070]: I1213 03:12:14.638734 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:12:14 crc kubenswrapper[5070]: I1213 03:12:14.645386 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 13 03:12:15 crc kubenswrapper[5070]: I1213 03:12:15.250048 5070 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Dec 13 03:12:15 crc kubenswrapper[5070]: I1213 03:12:15.321194 5070 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 13 03:12:15 crc kubenswrapper[5070]: I1213 03:12:15.321255 5070 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 13 03:12:15 crc kubenswrapper[5070]: I1213 03:12:15.321351 5070 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 13 03:12:15 crc kubenswrapper[5070]: I1213 03:12:15.322358 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:12:15 crc kubenswrapper[5070]: I1213 03:12:15.322858 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:12:15 crc kubenswrapper[5070]: I1213 03:12:15.322894 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:12:15 crc kubenswrapper[5070]: I1213 03:12:15.323198 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:12:15 crc kubenswrapper[5070]: I1213 03:12:15.323249 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:12:15 crc kubenswrapper[5070]: I1213 03:12:15.323268 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:12:16 crc kubenswrapper[5070]: I1213 03:12:16.372037 5070 reconstruct.go:205] "DevicePaths of reconstructed volumes updated" Dec 13 03:12:16 crc kubenswrapper[5070]: I1213 03:12:16.403234 5070 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Liveness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:53938->192.168.126.11:17697: read: connection reset by peer" start-of-body= Dec 13 03:12:16 crc kubenswrapper[5070]: I1213 03:12:16.403292 5070 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:53938->192.168.126.11:17697: read: connection reset by peer" Dec 13 03:12:16 crc kubenswrapper[5070]: I1213 03:12:16.403234 5070 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe 
status=failure output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:39846->192.168.126.11:17697: read: connection reset by peer" start-of-body= Dec 13 03:12:16 crc kubenswrapper[5070]: I1213 03:12:16.403352 5070 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:39846->192.168.126.11:17697: read: connection reset by peer" Dec 13 03:12:16 crc kubenswrapper[5070]: I1213 03:12:16.403629 5070 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Dec 13 03:12:16 crc kubenswrapper[5070]: I1213 03:12:16.403676 5070 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Dec 13 03:12:16 crc kubenswrapper[5070]: I1213 03:12:16.640890 5070 kubelet_node_status.go:115] "Node was previously registered" node="crc" Dec 13 03:12:16 crc kubenswrapper[5070]: I1213 03:12:16.641223 5070 kubelet_node_status.go:79] "Successfully registered node" node="crc" Dec 13 03:12:16 crc kubenswrapper[5070]: E1213 03:12:16.641244 5070 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": node \"crc\" not found" Dec 13 03:12:16 crc kubenswrapper[5070]: I1213 03:12:16.648181 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:12:16 crc kubenswrapper[5070]: I1213 03:12:16.648237 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:12:16 crc kubenswrapper[5070]: I1213 03:12:16.648252 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:12:16 crc kubenswrapper[5070]: I1213 03:12:16.648275 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 03:12:16 crc kubenswrapper[5070]: I1213 03:12:16.648291 5070 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T03:12:16Z","lastTransitionTime":"2025-12-13T03:12:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 03:12:16 crc kubenswrapper[5070]: E1213 03:12:16.663906 5070 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T03:12:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T03:12:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:16Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T03:12:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T03:12:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:16Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a2f84c95-0f97-4e73-a87e-b72c87d9f029\\\",\\\"systemUUID\\\":\\\"e9aee69c-6c9b-424e-9f99-9beaad60efce\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 13 03:12:16 crc kubenswrapper[5070]: I1213 03:12:16.670038 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:12:16 crc kubenswrapper[5070]: I1213 03:12:16.670089 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:12:16 crc kubenswrapper[5070]: I1213 03:12:16.670100 5070 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:12:16 crc kubenswrapper[5070]: I1213 03:12:16.670119 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 03:12:16 crc kubenswrapper[5070]: I1213 03:12:16.670131 5070 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T03:12:16Z","lastTransitionTime":"2025-12-13T03:12:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 03:12:16 crc kubenswrapper[5070]: E1213 03:12:16.682794 5070 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T03:12:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T03:12:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:16Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T03:12:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T03:12:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:16Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a2f84c95-0f97-4e73-a87e-b72c87d9f029\\\",\\\"systemUUID\\\":\\\"e9aee69c-6c9b-424e-9f99-9beaad60efce\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 13 03:12:16 crc kubenswrapper[5070]: I1213 03:12:16.688049 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:12:16 crc kubenswrapper[5070]: I1213 03:12:16.688117 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:12:16 crc kubenswrapper[5070]: I1213 03:12:16.688132 5070 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:12:16 crc kubenswrapper[5070]: I1213 03:12:16.688155 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 03:12:16 crc kubenswrapper[5070]: I1213 03:12:16.688169 5070 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T03:12:16Z","lastTransitionTime":"2025-12-13T03:12:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 03:12:16 crc kubenswrapper[5070]: E1213 03:12:16.701231 5070 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T03:12:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T03:12:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:16Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T03:12:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T03:12:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:16Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a2f84c95-0f97-4e73-a87e-b72c87d9f029\\\",\\\"systemUUID\\\":\\\"e9aee69c-6c9b-424e-9f99-9beaad60efce\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 13 03:12:16 crc kubenswrapper[5070]: I1213 03:12:16.704881 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:12:16 crc kubenswrapper[5070]: I1213 03:12:16.704926 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:12:16 crc kubenswrapper[5070]: I1213 03:12:16.704942 5070 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:12:16 crc kubenswrapper[5070]: I1213 03:12:16.704964 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 03:12:16 crc kubenswrapper[5070]: I1213 03:12:16.704981 5070 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T03:12:16Z","lastTransitionTime":"2025-12-13T03:12:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 03:12:16 crc kubenswrapper[5070]: E1213 03:12:16.716693 5070 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T03:12:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T03:12:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:16Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T03:12:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T03:12:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:16Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a2f84c95-0f97-4e73-a87e-b72c87d9f029\\\",\\\"systemUUID\\\":\\\"e9aee69c-6c9b-424e-9f99-9beaad60efce\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 13 03:12:16 crc kubenswrapper[5070]: E1213 03:12:16.716817 5070 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 13 03:12:16 crc kubenswrapper[5070]: E1213 03:12:16.716844 5070 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 13 03:12:16 crc kubenswrapper[5070]: E1213 03:12:16.817203 5070 
kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 13 03:12:16 crc kubenswrapper[5070]: I1213 03:12:16.875939 5070 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Dec 13 03:12:16 crc kubenswrapper[5070]: E1213 03:12:16.918765 5070 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 13 03:12:17 crc kubenswrapper[5070]: E1213 03:12:17.019364 5070 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 13 03:12:17 crc kubenswrapper[5070]: E1213 03:12:17.119539 5070 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 13 03:12:17 crc kubenswrapper[5070]: E1213 03:12:17.220671 5070 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 13 03:12:17 crc kubenswrapper[5070]: E1213 03:12:17.321590 5070 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 13 03:12:17 crc kubenswrapper[5070]: I1213 03:12:17.329130 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 13 03:12:17 crc kubenswrapper[5070]: I1213 03:12:17.332507 5070 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="cd6cdb068faf5f763aec54b08a8597572a72fbc5685b112558c444fda675eeb1" exitCode=255 Dec 13 03:12:17 crc kubenswrapper[5070]: I1213 03:12:17.332566 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"cd6cdb068faf5f763aec54b08a8597572a72fbc5685b112558c444fda675eeb1"} Dec 13 03:12:17 crc kubenswrapper[5070]: I1213 03:12:17.332791 5070 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 13 03:12:17 crc kubenswrapper[5070]: I1213 03:12:17.334175 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:12:17 crc kubenswrapper[5070]: I1213 03:12:17.334248 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:12:17 crc kubenswrapper[5070]: I1213 03:12:17.334268 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:12:17 crc kubenswrapper[5070]: I1213 03:12:17.335421 5070 scope.go:117] "RemoveContainer" containerID="cd6cdb068faf5f763aec54b08a8597572a72fbc5685b112558c444fda675eeb1" Dec 13 03:12:17 crc kubenswrapper[5070]: E1213 03:12:17.422281 5070 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 13 03:12:17 crc kubenswrapper[5070]: E1213 03:12:17.522678 5070 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 13 03:12:17 crc kubenswrapper[5070]: E1213 03:12:17.623793 5070 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 13 03:12:17 crc kubenswrapper[5070]: E1213 03:12:17.724371 5070 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 13 03:12:17 crc kubenswrapper[5070]: E1213 03:12:17.824984 5070 
kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 13 03:12:17 crc kubenswrapper[5070]: E1213 03:12:17.925731 5070 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 13 03:12:18 crc kubenswrapper[5070]: E1213 03:12:18.026793 5070 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 13 03:12:18 crc kubenswrapper[5070]: I1213 03:12:18.104775 5070 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Dec 13 03:12:18 crc kubenswrapper[5070]: I1213 03:12:18.135796 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:12:18 crc kubenswrapper[5070]: I1213 03:12:18.135895 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:12:18 crc kubenswrapper[5070]: I1213 03:12:18.135922 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:12:18 crc kubenswrapper[5070]: I1213 03:12:18.135958 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 03:12:18 crc kubenswrapper[5070]: I1213 03:12:18.135983 5070 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T03:12:18Z","lastTransitionTime":"2025-12-13T03:12:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 03:12:18 crc kubenswrapper[5070]: I1213 03:12:18.239888 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:12:18 crc kubenswrapper[5070]: I1213 03:12:18.239959 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:12:18 crc kubenswrapper[5070]: I1213 03:12:18.239983 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:12:18 crc kubenswrapper[5070]: I1213 03:12:18.240013 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 03:12:18 crc kubenswrapper[5070]: I1213 03:12:18.240038 5070 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T03:12:18Z","lastTransitionTime":"2025-12-13T03:12:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 03:12:18 crc kubenswrapper[5070]: I1213 03:12:18.340423 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 13 03:12:18 crc kubenswrapper[5070]: I1213 03:12:18.342564 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:12:18 crc kubenswrapper[5070]: I1213 03:12:18.342704 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:12:18 crc kubenswrapper[5070]: I1213 03:12:18.342782 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:12:18 crc kubenswrapper[5070]: I1213 03:12:18.342876 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 03:12:18 crc kubenswrapper[5070]: I1213 03:12:18.342942 5070 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T03:12:18Z","lastTransitionTime":"2025-12-13T03:12:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 03:12:18 crc kubenswrapper[5070]: I1213 03:12:18.344130 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"bee949adc33cab61eb1d70c6cecda6bbbf42616b431d2af90fcf71854407da41"} Dec 13 03:12:18 crc kubenswrapper[5070]: I1213 03:12:18.344539 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 13 03:12:18 crc kubenswrapper[5070]: I1213 03:12:18.445861 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:12:18 crc kubenswrapper[5070]: I1213 03:12:18.445926 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:12:18 crc kubenswrapper[5070]: I1213 03:12:18.445952 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:12:18 crc kubenswrapper[5070]: I1213 03:12:18.445983 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 03:12:18 crc kubenswrapper[5070]: I1213 03:12:18.446006 5070 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T03:12:18Z","lastTransitionTime":"2025-12-13T03:12:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 03:12:18 crc kubenswrapper[5070]: I1213 03:12:18.583769 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:12:18 crc kubenswrapper[5070]: I1213 03:12:18.583820 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:12:18 crc kubenswrapper[5070]: I1213 03:12:18.583833 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:12:18 crc kubenswrapper[5070]: I1213 03:12:18.583852 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 03:12:18 crc kubenswrapper[5070]: I1213 03:12:18.583863 5070 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T03:12:18Z","lastTransitionTime":"2025-12-13T03:12:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 03:12:18 crc kubenswrapper[5070]: I1213 03:12:18.687114 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:12:18 crc kubenswrapper[5070]: I1213 03:12:18.687180 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:12:18 crc kubenswrapper[5070]: I1213 03:12:18.687200 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:12:18 crc kubenswrapper[5070]: I1213 03:12:18.687221 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 03:12:18 crc kubenswrapper[5070]: I1213 03:12:18.687236 5070 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T03:12:18Z","lastTransitionTime":"2025-12-13T03:12:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 03:12:18 crc kubenswrapper[5070]: I1213 03:12:18.790214 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:12:18 crc kubenswrapper[5070]: I1213 03:12:18.790254 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:12:18 crc kubenswrapper[5070]: I1213 03:12:18.790264 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:12:18 crc kubenswrapper[5070]: I1213 03:12:18.790277 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 03:12:18 crc kubenswrapper[5070]: I1213 03:12:18.790286 5070 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T03:12:18Z","lastTransitionTime":"2025-12-13T03:12:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 03:12:18 crc kubenswrapper[5070]: I1213 03:12:18.893932 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:12:18 crc kubenswrapper[5070]: I1213 03:12:18.893995 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:12:18 crc kubenswrapper[5070]: I1213 03:12:18.894008 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:12:18 crc kubenswrapper[5070]: I1213 03:12:18.894029 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 03:12:18 crc kubenswrapper[5070]: I1213 03:12:18.894042 5070 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T03:12:18Z","lastTransitionTime":"2025-12-13T03:12:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 03:12:18 crc kubenswrapper[5070]: I1213 03:12:18.987561 5070 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Dec 13 03:12:18 crc kubenswrapper[5070]: I1213 03:12:18.997524 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:12:18 crc kubenswrapper[5070]: I1213 03:12:18.997597 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:12:18 crc kubenswrapper[5070]: I1213 03:12:18.997622 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:12:18 crc kubenswrapper[5070]: I1213 03:12:18.997657 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 03:12:18 crc kubenswrapper[5070]: I1213 03:12:18.997682 5070 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T03:12:18Z","lastTransitionTime":"2025-12-13T03:12:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.098418 5070 apiserver.go:52] "Watching apiserver" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.101408 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.101520 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.101612 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.101663 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.101685 5070 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T03:12:19Z","lastTransitionTime":"2025-12-13T03:12:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.102270 5070 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.102738 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-network-node-identity/network-node-identity-vrzqb","openshift-network-operator/iptables-alerter-4ln5h","openshift-network-operator/network-operator-58b4c7f79c-55gtf","openshift-kube-apiserver/kube-apiserver-crc","openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-diagnostics/network-check-target-xd92c"] Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.103285 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.103400 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.103624 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.103663 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.103819 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 13 03:12:19 crc kubenswrapper[5070]: E1213 03:12:19.103834 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 13 03:12:19 crc kubenswrapper[5070]: E1213 03:12:19.104056 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.104263 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 03:12:19 crc kubenswrapper[5070]: E1213 03:12:19.104354 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.107710 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.107800 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.108059 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.108345 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.108666 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.108752 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.111082 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.111125 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.111174 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.157923 5070 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:19Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.174332 5070 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:19Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.190047 5070 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:19Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.203821 5070 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.205559 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.205603 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.205620 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.205644 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.205660 5070 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T03:12:19Z","lastTransitionTime":"2025-12-13T03:12:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.209626 5070 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e688eb2-80cc-47e6-9b7b-748d6e6175ff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T03:11:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T03:11:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T03:11:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T03:11:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T03:11:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c109d3244fa0324a8388b5f433ae9d5eb4a8519b6abc901f9fb91284885b605\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T03:11:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7dfbc973291e3f829acc09c1dc5e465454ba0dc5ab088fc08211a86f51880fba\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T03:11:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57c12f12bfa97cb3476d9af993bc99bdd178d8d7a2f3d92bac86c096d2729c8f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T03:11:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bee949adc33cab61eb1d70c6cecda6bbbf42616b431d2af90fcf71854407da41\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cd6cdb068faf5f763aec54b08a8597572a72fbc5685b112558c444fda675eeb1\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-13T03:12:16Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1213 03:12:08.179364 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1213 03:12:08.180879 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2146923830/tls.crt::/tmp/serving-cert-2146923830/tls.key\\\\\\\"\\\\nI1213 03:12:16.380881 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1213 03:12:16.383343 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1213 03:12:16.383365 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1213 03:12:16.383398 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1213 03:12:16.383404 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1213 03:12:16.389728 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1213 03:12:16.389835 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 03:12:16.389872 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 03:12:16.389895 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1213 03:12:16.389935 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nI1213 03:12:16.389735 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1213 03:12:16.389972 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1213 03:12:16.390048 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1213 03:12:16.395221 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T03:11:57Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T03:12:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4317a72f422d158e2d22658fd3601f8f2da155e957551144bc153bcaca1f4673\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T03:11:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6fe1de36954b63d9bc0b059085237ea841127d94254ef6b5a2a7418077c3e367\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6fe1de36954b63d9bc0b059085237ea841127d94254ef6b5a2a7418077c3e367\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T03:11:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T03:11:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T03:11:49Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.225743 5070 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:19Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.240153 5070 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:19Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.257201 5070 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:19Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.272260 5070 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:19Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.293664 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.293721 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.293749 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.293774 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.293807 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.293838 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.293861 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 
03:12:19.293883 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.293906 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.293932 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.293956 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.293980 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.294004 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.294026 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.294048 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.294069 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.294088 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 13 03:12:19 
crc kubenswrapper[5070]: I1213 03:12:19.294357 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.294111 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.294558 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.294584 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.294606 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.294627 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.294643 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.294657 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.294673 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.294692 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: 
\"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.294742 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.294871 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.294981 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.295052 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.295054 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.295162 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.295261 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.295235 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.295367 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.295416 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.295899 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.295930 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.295957 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.295984 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.296007 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.296030 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.296055 
5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.296089 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.296111 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.296134 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.296156 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.296178 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.296202 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.296226 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.296250 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.296277 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 13 
03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.296299 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.296364 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.296386 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.296411 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.296429 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.296465 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.296490 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.296511 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.296532 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.296552 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 13 
03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.296570 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.296590 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.296610 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.296632 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.296655 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.296677 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.296698 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.296750 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.296867 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.296897 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: 
\"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.296921 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.296947 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.296969 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.296993 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.297017 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.297037 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.297055 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.297076 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.297098 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.297119 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod 
\"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.297142 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.297164 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.297187 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.297211 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.297231 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.297257 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.297277 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.297297 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.297325 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.297350 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: 
\"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.297406 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.297434 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.297477 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.297501 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.297528 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.297732 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.297756 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.297784 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.297807 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.297833 5070 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.297860 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.297884 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.297905 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.299878 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.299907 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.299934 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.300813 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.300843 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.300871 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 13 03:12:19 crc kubenswrapper[5070]: 
I1213 03:12:19.300895 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.300918 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.301050 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.301870 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.301899 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.301926 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.302003 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.302029 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.302051 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.302960 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.303311 
5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.303340 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.303356 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.303380 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.303403 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.303428 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.303476 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.303502 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.303527 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.303550 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.303574 5070 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.303596 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.303622 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.303650 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.303675 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.303698 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.303720 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.303744 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.303768 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.303792 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 13 03:12:19 crc 
kubenswrapper[5070]: I1213 03:12:19.303814 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.303837 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.303861 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.303885 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.303912 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.303937 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.303958 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.303984 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.304006 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.304028 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " 
Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.304051 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.304077 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.304102 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.304127 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.304156 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.304180 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.304204 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.304228 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.304250 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.304272 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod 
\"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.304295 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.304321 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.304347 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.304372 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.304395 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.304418 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.304492 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.304523 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.304546 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.304570 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod 
\"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.304591 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.304613 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.304637 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.304659 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.304683 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.304723 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.304748 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.304775 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.304798 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.304822 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod 
\"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.304846 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.304866 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.304887 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.304911 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.304940 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.304965 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.304991 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.305015 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.305041 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.305064 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod 
\"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.305087 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.305164 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.305190 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.305214 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.305237 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.305258 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.305282 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.305303 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.305330 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.305358 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: 
\"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.305382 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.305408 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.305512 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.305638 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.305685 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.305729 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.305770 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.305812 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.305849 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: 
\"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.305888 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.305927 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.305966 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.306009 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.306055 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.306102 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.306144 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.306183 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " 
pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.306266 5070 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.306292 5070 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.306317 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.306341 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.306364 5070 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.306388 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.306410 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.306432 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.306513 5070 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.306577 5070 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.307514 5070 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. 
Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.307860 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.309260 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.320892 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.320923 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.320932 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.320949 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.320959 5070 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T03:12:19Z","lastTransitionTime":"2025-12-13T03:12:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.323920 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.331395 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.295557 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.333001 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.295660 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.295947 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.296128 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.296147 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.296170 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.333296 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.333308 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). 
InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.334799 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: E1213 03:12:19.336354 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 03:12:19.836317106 +0000 UTC m=+32.072160682 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.296257 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.296416 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.296589 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.296762 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.297228 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.297268 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.297906 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.298189 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.298359 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.298569 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.298821 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.298877 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.298906 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). 
InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.298897 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.299323 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.299368 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.299471 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.299488 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.299365 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.299502 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.299518 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.299799 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.299897 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.300069 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.300080 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.300188 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.300192 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.300403 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). 
InnerVolumeSpecName "etcd-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.300780 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.301026 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.301053 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.301143 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.301220 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.301239 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.301559 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.302087 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.302100 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.302137 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.302177 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.302395 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.302428 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.302433 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.302654 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.302810 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.302862 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.303281 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.303485 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.303528 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.303831 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.303841 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.304017 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.304493 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.304615 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.304662 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.304730 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.304901 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.304899 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.304705 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.305095 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). 
InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.305308 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.305333 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.305707 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.305924 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.306046 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.306179 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.306187 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: E1213 03:12:19.306510 5070 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.306558 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.306617 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.306660 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: E1213 03:12:19.306706 5070 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.306735 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.306888 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.306983 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.307897 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.308182 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.308335 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.308481 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.308400 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.309034 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.309779 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.310214 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.310260 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.310353 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.310555 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.310688 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.310866 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.310916 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.310955 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.311137 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). 
InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.320599 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.320613 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.320834 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.325297 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.326185 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.326359 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.326380 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.326797 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.326857 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.326915 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.327639 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.328480 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.328690 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.329060 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.329138 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.329401 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.329691 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.329716 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.330008 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.330016 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.330098 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.330417 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.330631 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.331092 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.331224 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.331574 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: E1213 03:12:19.331659 5070 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 13 03:12:19 crc kubenswrapper[5070]: E1213 03:12:19.346494 5070 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 13 03:12:19 crc kubenswrapper[5070]: E1213 03:12:19.346518 5070 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.331688 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: E1213 03:12:19.331758 5070 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 13 03:12:19 crc kubenswrapper[5070]: E1213 03:12:19.346587 5070 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 13 03:12:19 crc kubenswrapper[5070]: E1213 03:12:19.346598 5070 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.332077 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.332287 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.332406 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.332411 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.332655 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.332767 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.332918 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: E1213 03:12:19.347765 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-13 03:12:19.847716377 +0000 UTC m=+32.083560043 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 13 03:12:19 crc kubenswrapper[5070]: E1213 03:12:19.348307 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-13 03:12:19.848269011 +0000 UTC m=+32.084112597 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.348388 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: E1213 03:12:19.348430 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-13 03:12:19.848404365 +0000 UTC m=+32.084248001 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.348327 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: E1213 03:12:19.348749 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-13 03:12:19.848732863 +0000 UTC m=+32.084576429 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.350049 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.350192 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.350372 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.350640 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.350774 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.350975 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.351283 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.351692 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.351911 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.353222 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.353407 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.353571 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). 
InnerVolumeSpecName "mcc-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.354593 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.354905 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.355655 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.355967 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.356081 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.356350 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.356817 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.357151 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.357565 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.357658 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.359557 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.360305 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.363079 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.363076 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.363505 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.363788 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.363796 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). 
InnerVolumeSpecName "kube-api-access-jkwtn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.363901 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.363948 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.364659 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.364361 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.364841 5070 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="bee949adc33cab61eb1d70c6cecda6bbbf42616b431d2af90fcf71854407da41" exitCode=255 Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.364879 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"bee949adc33cab61eb1d70c6cecda6bbbf42616b431d2af90fcf71854407da41"} Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.364932 5070 scope.go:117] "RemoveContainer" containerID="cd6cdb068faf5f763aec54b08a8597572a72fbc5685b112558c444fda675eeb1" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.365177 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.365283 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.365357 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.366237 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.366364 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.366882 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.367546 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 13 03:12:19 crc kubenswrapper[5070]: E1213 03:12:19.371721 5070 kubelet.go:1929] "Failed creating a mirror pod for" err="pods \"kube-apiserver-crc\" already exists" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.372552 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.372845 5070 scope.go:117] "RemoveContainer" containerID="bee949adc33cab61eb1d70c6cecda6bbbf42616b431d2af90fcf71854407da41" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.373089 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: E1213 03:12:19.373184 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.371374 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.373642 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.374374 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.374434 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.374721 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.374758 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.375235 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.376960 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.376993 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.377133 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.389138 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.391283 5070 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:19Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.393010 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.396653 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.400903 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.403029 5070 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:19Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.407210 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.407291 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.407406 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.407435 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.407497 5070 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.407525 5070 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.407553 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.407582 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.407608 5070 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: 
\"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.407646 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.407689 5070 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.407648 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.407804 5070 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.407836 5070 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.407854 5070 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.407871 5070 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.407888 5070 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.407905 5070 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.407926 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.407944 5070 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.407962 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" 
DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.407979 5070 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.407995 5070 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.408013 5070 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.408031 5070 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.408050 5070 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.408069 5070 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.408085 5070 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.408101 5070 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.408120 5070 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.408137 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.408154 5070 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.408170 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.408187 5070 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 
crc kubenswrapper[5070]: I1213 03:12:19.408205 5070 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.408222 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.408239 5070 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.408256 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.408272 5070 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.408288 5070 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.408306 5070 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.408324 5070 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.408342 5070 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.408360 5070 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.408377 5070 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.408394 5070 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.408410 5070 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: 
\"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.408427 5070 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.408475 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.408501 5070 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.408520 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.408537 5070 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.408554 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.408571 5070 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.408587 5070 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.408604 5070 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.408621 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.408637 5070 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.408656 5070 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.408704 5070 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: 
\"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.408721 5070 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.408738 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.408756 5070 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.408772 5070 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.408789 5070 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.408806 5070 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.408822 5070 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.408838 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.408855 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.408873 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.408889 5070 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.408905 5070 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.408922 5070 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: 
\"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.408940 5070 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.408957 5070 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.408976 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.408992 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.409011 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.409028 5070 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.409045 5070 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.409061 5070 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.409078 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.409094 5070 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.409110 5070 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.409127 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.409144 5070 reconciler_common.go:293] "Volume detached for volume 
\"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.409163 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.409180 5070 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.409198 5070 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.409214 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.409231 5070 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.409247 5070 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.409263 5070 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.409280 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.409325 5070 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.409343 5070 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.409362 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.409381 5070 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.409397 5070 
reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.409413 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.409430 5070 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.409481 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.409504 5070 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.409521 5070 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.409539 5070 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.409555 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.409572 5070 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.409589 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.409606 5070 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.409622 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.409640 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.409657 5070 
reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.409675 5070 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.409692 5070 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.409710 5070 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.409726 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.409744 5070 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.409761 5070 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.409778 5070 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.409795 5070 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.409811 5070 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.409828 5070 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.409844 5070 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.409863 5070 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.409880 5070 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: 
\"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.409895 5070 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.409914 5070 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.409930 5070 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.409949 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.409966 5070 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.409983 5070 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.409998 5070 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.410015 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.410031 5070 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.410047 5070 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.410064 5070 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.410081 5070 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.410097 5070 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: 
\"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.410116 5070 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.410132 5070 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.410150 5070 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.410166 5070 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.410183 5070 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.410199 5070 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.410216 5070 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.410235 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.410253 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.410270 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.410287 5070 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.410304 5070 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.410320 5070 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.410337 5070 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.410354 5070 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.410372 5070 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.410389 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.410406 5070 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.410423 5070 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.410471 5070 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.410498 5070 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.410517 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.410535 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.410552 5070 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.410568 5070 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.410585 5070 reconciler_common.go:293] "Volume detached for volume 
\"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.410603 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.410620 5070 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.410636 5070 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.410654 5070 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.410671 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.410688 5070 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.410705 5070 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.410721 5070 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.410737 5070 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.410754 5070 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.410771 5070 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.410787 5070 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.410804 5070 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: 
\"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.410821 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.410838 5070 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.410855 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.410872 5070 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.410888 5070 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.410904 5070 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.410921 5070 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.410938 5070 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.410955 5070 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.410973 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.410990 5070 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.417806 5070 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:19Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.423634 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.423684 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.423700 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.423721 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.423737 5070 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T03:12:19Z","lastTransitionTime":"2025-12-13T03:12:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.430632 5070 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:19Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.432509 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.435702 5070 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.441084 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.447371 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.447974 5070 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e688eb2-80cc-47e6-9b7b-748d6e6175ff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T03:11:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T03:11:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T03:11:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T03:11:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T03:11:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c109d3244fa0324a8388b5f433ae9d5eb4a8519b6abc901f9fb91284885b605\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T03:11:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7dfbc973291e3f829acc09c1dc5e465454ba0dc5ab088fc08211a86f51880fba\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T03:11:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57c12f12bfa97cb3476d9af993bc99bdd178d8d7a2f3d92bac86c096d2729c8f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-api
server-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T03:11:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bee949adc33cab61eb1d70c6cecda6bbbf42616b431d2af90fcf71854407da41\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cd6cdb068faf5f763aec54b08a8597572a72fbc5685b112558c444fda675eeb1\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-13T03:12:16Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1213 03:12:08.179364 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1213 03:12:08.180879 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2146923830/tls.crt::/tmp/serving-cert-2146923830/tls.key\\\\\\\"\\\\nI1213 03:12:16.380881 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1213 03:12:16.383343 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1213 03:12:16.383365 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1213 03:12:16.383398 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1213 03:12:16.383404 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1213 03:12:16.389728 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1213 03:12:16.389835 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 03:12:16.389872 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 03:12:16.389895 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1213 03:12:16.389935 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nI1213 03:12:16.389735 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1213 03:12:16.389972 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1213 03:12:16.390048 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1213 03:12:16.395221 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T03:11:57Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bee949adc33cab61eb1d70c6cecda6bbbf42616b431d2af90fcf71854407da41\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-13T03:12:18Z\\\",\\\"message\\\":\\\"le observer\\\\nW1213 03:12:18.048782 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1213 03:12:18.048910 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1213 03:12:18.049664 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2169446048/tls.crt::/tmp/serving-cert-2169446048/tls.key\\\\\\\"\\\\nI1213 03:12:18.331471 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1213 03:12:18.333517 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1213 03:12:18.333571 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1213 03:12:18.333621 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1213 03:12:18.333650 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1213 03:12:18.339654 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1213 03:12:18.339723 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 03:12:18.339796 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 03:12:18.339862 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1213 03:12:18.339884 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1213 03:12:18.340001 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1213 03:12:18.340065 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1213 03:12:18.339662 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1213 03:12:18.345582 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T03:12:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4317a72f422d158e2d22658fd3601f8f2da155e957551144bc153bcaca1f4673\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T03:11:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6fe1de36954b63d9bc0b059085237ea841127d94254ef6b5a2a7418077c3e367\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6fe1de36954b63d9bc0b059085237ea841127d94254ef6b5a2a7418077c3e367\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T03:11:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T03:11:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T03:11:49Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 13 03:12:19 crc kubenswrapper[5070]: W1213 03:12:19.454974 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod37a5e44f_9a88_4405_be8a_b645485e7312.slice/crio-ecdef69736ec5c8e73ddeb6751245003bd7fb2a32c2ddea4cf3ccd8ae29f6e2b WatchSource:0}: Error finding container ecdef69736ec5c8e73ddeb6751245003bd7fb2a32c2ddea4cf3ccd8ae29f6e2b: Status 404 returned error can't find the container with id ecdef69736ec5c8e73ddeb6751245003bd7fb2a32c2ddea4cf3ccd8ae29f6e2b Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.460838 5070 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:19Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.469919 5070 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:19Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.559926 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.560406 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.560418 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.560451 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.560466 5070 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T03:12:19Z","lastTransitionTime":"2025-12-13T03:12:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.664829 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.664879 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.664895 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.664920 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.664939 5070 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T03:12:19Z","lastTransitionTime":"2025-12-13T03:12:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.767845 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.767883 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.767895 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.767912 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.767923 5070 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T03:12:19Z","lastTransitionTime":"2025-12-13T03:12:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.775484 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.778776 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.786211 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/kube-controller-manager-crc"] Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.787991 5070 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:19Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.802509 5070 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:19Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.815276 5070 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e688eb2-80cc-47e6-9b7b-748d6e6175ff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T03:11:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T03:11:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T03:11:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T03:11:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T03:11:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c109d3244fa0324a8388b5f433ae9d5eb4a8519b6abc901f9fb91284885b605\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T03:11:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7dfbc973291e3f829acc09c1dc5e465454ba0dc5ab088fc08211a86f51880fba\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T03:11:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57c12f12bfa97cb3476d9af993bc99bdd178d8d7a2f3d92bac86c096d2729c8f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T03:11:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bee949adc33cab61eb1d70c6cecda6bbbf42616b431d2af90fcf71854407da41\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cd6cdb068faf5f763aec54b08a8597572a72fbc5685b112558c444fda675eeb1\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-13T03:12:16Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1213 03:12:08.179364 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1213 03:12:08.180879 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2146923830/tls.crt::/tmp/serving-cert-2146923830/tls.key\\\\\\\"\\\\nI1213 03:12:16.380881 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1213 03:12:16.383343 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1213 03:12:16.383365 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1213 03:12:16.383398 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1213 03:12:16.383404 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1213 03:12:16.389728 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1213 03:12:16.389835 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 03:12:16.389872 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 03:12:16.389895 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1213 03:12:16.389935 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nI1213 03:12:16.389735 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1213 03:12:16.389972 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1213 03:12:16.390048 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1213 03:12:16.395221 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T03:11:57Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bee949adc33cab61eb1d70c6cecda6bbbf42616b431d2af90fcf71854407da41\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-13T03:12:18Z\\\",\\\"message\\\":\\\"le observer\\\\nW1213 03:12:18.048782 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1213 03:12:18.048910 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1213 03:12:18.049664 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2169446048/tls.crt::/tmp/serving-cert-2169446048/tls.key\\\\\\\"\\\\nI1213 03:12:18.331471 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1213 03:12:18.333517 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1213 03:12:18.333571 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1213 03:12:18.333621 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1213 03:12:18.333650 1 
maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1213 03:12:18.339654 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1213 03:12:18.339723 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 03:12:18.339796 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 03:12:18.339862 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1213 03:12:18.339884 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1213 03:12:18.340001 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1213 03:12:18.340065 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1213 03:12:18.339662 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1213 03:12:18.345582 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T03:12:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4317a72f422d158e2d22658fd3601f8f2da155e957551144bc153bcaca1f4673\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T03:11:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6fe1de36954b63d9bc0b059085237ea841127d94254ef6b5a2a7418077c3e367\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6fe1de36954b63d9bc0b059085237ea841127d94254ef6b5a2a7418077c3e367\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T03:11:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T03:11:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T03:11:49Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 
03:12:19.825938 5070 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:19Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.836686 5070 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:19Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.847972 5070 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:19Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.859023 5070 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:19Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.870764 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.870782 5070 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:19Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.870815 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.870913 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.870930 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.870942 5070 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T03:12:19Z","lastTransitionTime":"2025-12-13T03:12:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.882072 5070 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:19Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.896184 5070 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e688eb2-80cc-47e6-9b7b-748d6e6175ff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T03:11:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T03:11:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T03:11:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T03:11:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T03:11:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c109d3244fa0324a8388b5f433ae9d5eb4a8519b6abc901f9fb91284885b605\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T03:11:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7dfbc973291e3f829acc09c1dc5e465454ba0dc5ab088fc08211a86f51880fba\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T03:11:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57c12f12bfa97cb3476d9af993bc99bdd178d8d7a2f3d92bac86c096d2729c8f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T03:11:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bee949adc33cab61eb1d70c6cecda6bbbf42616b431d2af90fcf71854407da41\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cd6cdb068faf5f763aec54b08a8597572a72fbc5685b112558c444fda675eeb1\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-13T03:12:16Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1213 03:12:08.179364 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1213 03:12:08.180879 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2146923830/tls.crt::/tmp/serving-cert-2146923830/tls.key\\\\\\\"\\\\nI1213 03:12:16.380881 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1213 03:12:16.383343 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1213 03:12:16.383365 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1213 03:12:16.383398 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1213 03:12:16.383404 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1213 03:12:16.389728 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1213 03:12:16.389835 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 03:12:16.389872 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 03:12:16.389895 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1213 03:12:16.389935 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nI1213 03:12:16.389735 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1213 03:12:16.389972 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1213 03:12:16.390048 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1213 03:12:16.395221 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T03:11:57Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bee949adc33cab61eb1d70c6cecda6bbbf42616b431d2af90fcf71854407da41\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-13T03:12:18Z\\\",\\\"message\\\":\\\"le observer\\\\nW1213 03:12:18.048782 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1213 03:12:18.048910 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1213 03:12:18.049664 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2169446048/tls.crt::/tmp/serving-cert-2169446048/tls.key\\\\\\\"\\\\nI1213 03:12:18.331471 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1213 03:12:18.333517 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1213 03:12:18.333571 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1213 03:12:18.333621 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1213 03:12:18.333650 1 
maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1213 03:12:18.339654 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1213 03:12:18.339723 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 03:12:18.339796 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 03:12:18.339862 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1213 03:12:18.339884 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1213 03:12:18.340001 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1213 03:12:18.340065 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1213 03:12:18.339662 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1213 03:12:18.345582 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T03:12:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4317a72f422d158e2d22658fd3601f8f2da155e957551144bc153bcaca1f4673\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T03:11:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6fe1de36954b63d9bc0b059085237ea841127d94254ef6b5a2a7418077c3e367\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6fe1de36954b63d9bc0b059085237ea841127d94254ef6b5a2a7418077c3e367\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T03:11:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T03:11:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T03:11:49Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-13T03:12:19Z is after 2025-08-24T17:21:41Z" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.913755 5070 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1d28ac43-3a1f-4f2b-aba8-0177f11d672f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T03:11:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T03:11:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T03:11:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a598d49ebe10cda296cbb779c7bd5cf606e827af564ac0ae813eb7ab0cfca9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T03:11:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32c7d4f0ad60bf4cac9e70945fd023c2bbe79250fac7118c090d735ad2965c6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T03:11:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://43350ca69ace891078b304f722e18e3bb897227db032f20bc060516caebf72fe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T03:11:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}
,{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae3ccc8e4e5fc8cf1bdd87599e40edb02236a05c8698aa097a4181020f60da1a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T03:11:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T03:11:49Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T03:12:19Z is after 2025-08-24T17:21:41Z" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.914986 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.915086 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.915130 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 03:12:19 crc kubenswrapper[5070]: E1213 03:12:19.915202 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 03:12:20.915172387 +0000 UTC m=+33.151015953 (durationBeforeRetry 1s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 03:12:19 crc kubenswrapper[5070]: E1213 03:12:19.915246 5070 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 13 03:12:19 crc kubenswrapper[5070]: E1213 03:12:19.915258 5070 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 13 03:12:19 crc kubenswrapper[5070]: E1213 03:12:19.915300 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-13 03:12:20.91528997 +0000 UTC m=+33.151133516 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.915301 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 03:12:19 crc kubenswrapper[5070]: E1213 03:12:19.915345 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-13 03:12:20.915318681 +0000 UTC m=+33.151162297 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.915388 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 03:12:19 crc kubenswrapper[5070]: E1213 03:12:19.915411 5070 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 13 03:12:19 crc kubenswrapper[5070]: E1213 03:12:19.915430 5070 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 13 03:12:19 crc kubenswrapper[5070]: E1213 03:12:19.915464 5070 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 13 03:12:19 crc kubenswrapper[5070]: E1213 03:12:19.915508 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-13 03:12:20.915497375 +0000 UTC m=+33.151340931 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 13 03:12:19 crc kubenswrapper[5070]: E1213 03:12:19.915653 5070 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 13 03:12:19 crc kubenswrapper[5070]: E1213 03:12:19.915687 5070 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 13 03:12:19 crc kubenswrapper[5070]: E1213 03:12:19.915704 5070 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 13 03:12:19 crc kubenswrapper[5070]: E1213 03:12:19.915745 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-13 03:12:20.915732461 +0000 UTC m=+33.151576027 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.927824 5070 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:19Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T03:12:19Z is after 2025-08-24T17:21:41Z" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.940931 5070 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:19Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T03:12:19Z is after 2025-08-24T17:21:41Z" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.952215 5070 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:19Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T03:12:19Z is after 2025-08-24T17:21:41Z" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.967856 5070 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:19Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T03:12:19Z is after 2025-08-24T17:21:41Z" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.973835 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.973895 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.973914 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.973937 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 03:12:19 crc kubenswrapper[5070]: I1213 03:12:19.973954 5070 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T03:12:19Z","lastTransitionTime":"2025-12-13T03:12:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.076635 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.076677 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.076687 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.076701 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.076711 5070 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T03:12:20Z","lastTransitionTime":"2025-12-13T03:12:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.170837 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.171469 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.172972 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.173725 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.174877 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.175461 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.176174 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.177300 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.178056 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.179130 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.179376 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.179415 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.179482 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.179502 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.179539 5070 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T03:12:20Z","lastTransitionTime":"2025-12-13T03:12:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.179752 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.181260 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.181855 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.182468 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.183594 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.184179 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.185363 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.185830 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.186520 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.188781 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.189308 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.189978 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.191142 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.191940 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes 
dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.193074 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.193784 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.194996 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.195667 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.196829 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.197365 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.197929 5070 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.198053 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.200478 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.201193 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.202429 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.204619 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.205499 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.206834 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.207878 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.209340 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.209989 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.211488 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.212419 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.213724 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.214346 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.215727 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.216538 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.218293 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.218955 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.220330 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.221017 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.221873 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" 
path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.222714 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.223248 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.282979 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.283047 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.283061 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.283080 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.283093 5070 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T03:12:20Z","lastTransitionTime":"2025-12-13T03:12:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.369429 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"30c6ed669d99edec482a67a465c8b8aa3ba763cbfc597da5d74b197b61b4bea5"} Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.369494 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"ecdef69736ec5c8e73ddeb6751245003bd7fb2a32c2ddea4cf3ccd8ae29f6e2b"} Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.371824 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.375392 5070 scope.go:117] "RemoveContainer" containerID="bee949adc33cab61eb1d70c6cecda6bbbf42616b431d2af90fcf71854407da41" Dec 13 03:12:20 crc kubenswrapper[5070]: E1213 03:12:20.375848 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.376343 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" 
event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"9cf2c62068c91be5ea5162d5c01bc1d23f725d6f5eb5b9eddc1e2160ecab3c2e"} Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.376417 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"5ff27f4b6d1d059cfd8b29fb98a347484550a85a8742c729592c9353d28703da"} Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.376605 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"9a988291bb51ca489d4275c3b38983d1c74f8ab913bd559d7fd1145395928251"} Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.377722 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"4165d9a82d1d1209fd17f491fccc0a902972a3c6a7742d4cbc0947586b48ede7"} Dec 13 03:12:20 crc kubenswrapper[5070]: E1213 03:12:20.383750 5070 kubelet.go:1929] "Failed creating a mirror pod for" err="pods \"kube-controller-manager-crc\" already exists" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.384023 5070 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:19Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T03:12:20Z is after 2025-08-24T17:21:41Z" Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.385488 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.385546 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.385562 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.385585 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.385603 5070 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T03:12:20Z","lastTransitionTime":"2025-12-13T03:12:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.399130 5070 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:19Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T03:12:20Z is after 2025-08-24T17:21:41Z" Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.411551 5070 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:19Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T03:12:20Z is after 2025-08-24T17:21:41Z" Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.426359 5070 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:19Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T03:12:20Z is after 2025-08-24T17:21:41Z" Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.445855 5070 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:19Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be 
located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T03:12:20Z is after 2025-08-24T17:21:41Z" Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.463620 5070 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e688eb2-80cc-47e6-9b7b-748d6e6175ff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T03:11:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T03:11:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T03:11:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T03:11:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T03:11:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c109d3244fa0324a8388b5f433ae9d5eb4a8519b6abc901f9fb91284885b605\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T03:11:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7dfbc973291e3f829acc09c1dc5e465454ba0dc5ab088fc08211a86f51880fba\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T03:11:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57c12f12bfa97cb3476d9af993bc99bdd178d8d7a2f3d92bac86c096d2729c8f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T03:11:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bee949adc33cab61eb1d70c6cecda6bbbf42616b431d2af90fcf71854407da41\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cd6cdb068faf5f763aec54b08a8597572a72fbc5685b112558c444fda675eeb1\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-13T03:12:16Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1213 03:12:08.179364 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1213 03:12:08.180879 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2146923830/tls.crt::/tmp/serving-cert-2146923830/tls.key\\\\\\\"\\\\nI1213 03:12:16.380881 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1213 03:12:16.383343 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1213 03:12:16.383365 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1213 03:12:16.383398 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1213 03:12:16.383404 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1213 03:12:16.389728 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1213 03:12:16.389835 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 03:12:16.389872 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 03:12:16.389895 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1213 03:12:16.389935 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nI1213 03:12:16.389735 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1213 03:12:16.389972 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1213 03:12:16.390048 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1213 03:12:16.395221 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T03:11:57Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bee949adc33cab61eb1d70c6cecda6bbbf42616b431d2af90fcf71854407da41\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-13T03:12:18Z\\\",\\\"message\\\":\\\"le observer\\\\nW1213 03:12:18.048782 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1213 03:12:18.048910 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1213 03:12:18.049664 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2169446048/tls.crt::/tmp/serving-cert-2169446048/tls.key\\\\\\\"\\\\nI1213 03:12:18.331471 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1213 03:12:18.333517 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1213 03:12:18.333571 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1213 03:12:18.333621 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1213 03:12:18.333650 1 
maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1213 03:12:18.339654 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1213 03:12:18.339723 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 03:12:18.339796 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 03:12:18.339862 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1213 03:12:18.339884 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1213 03:12:18.340001 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1213 03:12:18.340065 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1213 03:12:18.339662 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1213 03:12:18.345582 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T03:12:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4317a72f422d158e2d22658fd3601f8f2da155e957551144bc153bcaca1f4673\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T03:11:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6fe1de36954b63d9bc0b059085237ea841127d94254ef6b5a2a7418077c3e367\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6fe1de36954b63d9bc0b059085237ea841127d94254ef6b5a2a7418077c3e367\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T03:11:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T03:11:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T03:11:49Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-13T03:12:20Z is after 2025-08-24T17:21:41Z" Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.477639 5070 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1d28ac43-3a1f-4f2b-aba8-0177f11d672f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T03:11:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T03:11:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T03:11:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a598d49ebe10cda296cbb779c7bd5cf606e827af564ac0ae813eb7ab0cfca9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T03:11:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32c7d4f0ad60bf4cac9e70945fd023c2bbe79250fac7118c090d735ad2965c6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T03:11:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://43350ca69ace891078b304f722e18e3bb897227db032f20bc060516caebf72fe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T03:11:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}
,{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae3ccc8e4e5fc8cf1bdd87599e40edb02236a05c8698aa097a4181020f60da1a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T03:11:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T03:11:49Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T03:12:20Z is after 2025-08-24T17:21:41Z" Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.489400 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.489456 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.489469 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.489488 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.489502 5070 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T03:12:20Z","lastTransitionTime":"2025-12-13T03:12:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.491553 5070 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30c6ed669d99edec482a67a465c8b8aa3ba763cbfc597da5d74b197b61b4bea5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T03:12:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T03:12:20Z is after 2025-08-24T17:21:41Z" Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.507535 5070 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:19Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T03:12:20Z is after 2025-08-24T17:21:41Z" Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.523785 5070 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:19Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T03:12:20Z is after 2025-08-24T17:21:41Z" Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.544383 5070 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30c6ed669d99edec482a67a465c8b8aa3ba763cbfc597da5d74b197b61b4bea5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T03:12:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T03:12:20Z is after 2025-08-24T17:21:41Z" Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.562948 5070 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:19Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T03:12:20Z is after 2025-08-24T17:21:41Z" Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.578213 5070 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9cf2c62068c91be5ea5162d5c01bc1d23f725d6f5eb5b9eddc1e2160ecab3c2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T03:12:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ff27f4b6d1d059cfd8b29fb98a347484550a85a8742c729592c9353d28703da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T03:12:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T03:12:20Z is after 2025-08-24T17:21:41Z" Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.592407 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.592459 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.592473 5070 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.592488 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.592498 5070 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T03:12:20Z","lastTransitionTime":"2025-12-13T03:12:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.597229 5070 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:19Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T03:12:20Z is after 2025-08-24T17:21:41Z" Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.617367 5070 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e688eb2-80cc-47e6-9b7b-748d6e6175ff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T03:11:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T03:11:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T03:11:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T03:11:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T03:11:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c109d3244fa0324a8388b5f433ae9d5eb4a8519b6abc901f9fb91284885b605\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T03:11:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7dfbc973291e3f829acc09c1dc5e465454ba0dc5ab088fc08211a86f51880fba\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T03:11:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57c12f12bfa97cb3476d9af993bc99bdd178d8d7a2f3d92bac86c096d2729c8f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T03:11:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bee949adc33cab61eb1d70c6cecda6bbbf42616b431d2af90fcf71854407da41\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bee949adc33cab61eb1d70c6cecda6bbbf42616b431d2af90fcf71854407da41\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-13T03:12:18Z\\\",\\\"message\\\":\\\"le observer\\\\nW1213 03:12:18.048782 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1213 03:12:18.048910 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1213 03:12:18.049664 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2169446048/tls.crt::/tmp/serving-cert-2169446048/tls.key\\\\\\\"\\\\nI1213 03:12:18.331471 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1213 03:12:18.333517 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1213 03:12:18.333571 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1213 03:12:18.333621 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1213 03:12:18.333650 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1213 03:12:18.339654 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1213 03:12:18.339723 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 03:12:18.339796 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 03:12:18.339862 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1213 03:12:18.339884 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1213 03:12:18.340001 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1213 03:12:18.340065 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1213 03:12:18.339662 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1213 03:12:18.345582 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T03:12:17Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4317a72f422d158e2d22658fd3601f8f2da155e957551144bc153bcaca1f4673\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T03:11:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6fe1de36954b63d9bc0b059085237ea841127d94254ef6b5a2a7418077c3e367\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6fe1de36954b63d9bc0b059085237ea841127d94254ef6b5a2a7418077c3e367\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T03:11:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T03:11:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T03:11:49Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T03:12:20Z is after 2025-08-24T17:21:41Z" Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.630766 5070 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1d28ac43-3a1f-4f2b-aba8-0177f11d672f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T03:11:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T03:11:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T03:11:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a598d49ebe10cda296cbb779c7bd5cf606e827af564ac0ae813eb7ab0cfca9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T03:11:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32c7d4f0ad60bf4cac9e70945fd023c2bbe79250fac7118c090d735ad2965c6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T03:11:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://43350ca69ace891078b304f722e18e3bb897227db032f20bc060516caebf72fe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T03:11:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae3ccc8e4e5fc8cf1bdd87599e40edb02236a05c8698aa097a4181020f60da1a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T03:11:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T03:11:49Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T03:12:20Z is after 2025-08-24T17:21:41Z" Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.695396 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.695478 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.695493 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.695517 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.695529 5070 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T03:12:20Z","lastTransitionTime":"2025-12-13T03:12:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.798121 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.798180 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.798227 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.798250 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.798261 5070 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T03:12:20Z","lastTransitionTime":"2025-12-13T03:12:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.905087 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.905145 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.905157 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.905177 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.905190 5070 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T03:12:20Z","lastTransitionTime":"2025-12-13T03:12:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.924205 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.924369 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.924407 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 03:12:20 crc kubenswrapper[5070]: E1213 03:12:20.924474 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 03:12:22.924412578 +0000 UTC m=+35.160256124 (durationBeforeRetry 2s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.924545 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 03:12:20 crc kubenswrapper[5070]: I1213 03:12:20.924597 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 03:12:20 crc kubenswrapper[5070]: E1213 03:12:20.924610 5070 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 13 03:12:20 crc kubenswrapper[5070]: E1213 03:12:20.924638 5070 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 13 03:12:20 crc kubenswrapper[5070]: E1213 
03:12:20.924653 5070 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 13 03:12:20 crc kubenswrapper[5070]: E1213 03:12:20.924662 5070 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 13 03:12:20 crc kubenswrapper[5070]: E1213 03:12:20.924717 5070 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 13 03:12:20 crc kubenswrapper[5070]: E1213 03:12:20.924719 5070 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 13 03:12:20 crc kubenswrapper[5070]: E1213 03:12:20.924737 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-13 03:12:22.924715206 +0000 UTC m=+35.160558922 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 13 03:12:20 crc kubenswrapper[5070]: E1213 03:12:20.924738 5070 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 13 03:12:20 crc kubenswrapper[5070]: E1213 03:12:20.924862 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-13 03:12:22.924836139 +0000 UTC m=+35.160679865 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 13 03:12:20 crc kubenswrapper[5070]: E1213 03:12:20.924904 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-13 03:12:22.92488662 +0000 UTC m=+35.160730166 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 13 03:12:20 crc kubenswrapper[5070]: E1213 03:12:20.924752 5070 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 13 03:12:20 crc kubenswrapper[5070]: E1213 03:12:20.924957 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-13 03:12:22.924950912 +0000 UTC m=+35.160794458 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.011519 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.011571 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.011580 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.011597 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.011608 5070 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T03:12:21Z","lastTransitionTime":"2025-12-13T03:12:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.078857 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/node-resolver-cvqqv"] Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.079145 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-9l8nl"] Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.079311 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-cvqqv" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.079320 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-9l8nl" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.081885 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.083817 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.085546 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.085785 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.085935 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.086191 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.087459 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.112074 5070 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30c6ed669d99edec482a67a465c8b8aa3ba763cbfc597da5d74b197b61b4bea5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T03:12:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: 
certificate has expired or is not yet valid: current time 2025-12-13T03:12:21Z is after 2025-08-24T17:21:41Z" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.119929 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.119966 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.119977 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.119993 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.120004 5070 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T03:12:21Z","lastTransitionTime":"2025-12-13T03:12:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.126967 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/49eb1fc8-400f-470b-8664-39fd6c652542-host\") pod \"node-ca-9l8nl\" (UID: \"49eb1fc8-400f-470b-8664-39fd6c652542\") " pod="openshift-image-registry/node-ca-9l8nl" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.127025 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b25nv\" (UniqueName: \"kubernetes.io/projected/8298b7de-21e2-4cb5-b4b4-0f556d87b792-kube-api-access-b25nv\") pod \"node-resolver-cvqqv\" (UID: \"8298b7de-21e2-4cb5-b4b4-0f556d87b792\") " pod="openshift-dns/node-resolver-cvqqv" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.127046 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/49eb1fc8-400f-470b-8664-39fd6c652542-serviceca\") pod \"node-ca-9l8nl\" (UID: \"49eb1fc8-400f-470b-8664-39fd6c652542\") " pod="openshift-image-registry/node-ca-9l8nl" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.127065 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/8298b7de-21e2-4cb5-b4b4-0f556d87b792-hosts-file\") pod \"node-resolver-cvqqv\" (UID: \"8298b7de-21e2-4cb5-b4b4-0f556d87b792\") " pod="openshift-dns/node-resolver-cvqqv" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.127089 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wfp2x\" (UniqueName: \"kubernetes.io/projected/49eb1fc8-400f-470b-8664-39fd6c652542-kube-api-access-wfp2x\") pod \"node-ca-9l8nl\" (UID: \"49eb1fc8-400f-470b-8664-39fd6c652542\") " pod="openshift-image-registry/node-ca-9l8nl" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.138411 5070 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:19Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T03:12:21Z is after 2025-08-24T17:21:41Z" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.159036 5070 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9cf2c62068c91be5ea5162d5c01bc1d23f725d6f5eb5b9eddc1e2160ecab3c2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T03:12:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ff27f4b6d1d059cfd8b29fb98a347484550a85a8742c729592c9353d28703da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T03:12:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T03:12:21Z is after 2025-08-24T17:21:41Z" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.166026 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 03:12:21 crc kubenswrapper[5070]: E1213 03:12:21.166159 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.166521 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 03:12:21 crc kubenswrapper[5070]: E1213 03:12:21.166590 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.166646 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 03:12:21 crc kubenswrapper[5070]: E1213 03:12:21.166704 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.189021 5070 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:19Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T03:12:21Z is after 2025-08-24T17:21:41Z" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.219646 5070 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e688eb2-80cc-47e6-9b7b-748d6e6175ff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T03:11:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T03:11:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T03:11:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T03:11:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T03:11:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c109d3244fa0324a8388b5f433ae9d5eb4a8519b6abc901f9fb91284885b605\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T03:11:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7dfbc973291e3f829acc09c1dc5e465454ba0dc5ab088fc08211a86f51880fba\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T03:11:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57c12f12bfa97cb3476d9af993bc99bdd178d8d7a2f3d92bac86c096d2729c8f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T03:11:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bee949adc33cab61eb1d70c6cecda6bbbf42616b431d2af90fcf71854407da41\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bee949adc33cab61eb1d70c6cecda6bbbf42616b431d2af90fcf71854407da41\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-13T03:12:18Z\\\",\\\"message\\\":\\\"le observer\\\\nW1213 03:12:18.048782 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1213 03:12:18.048910 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1213 03:12:18.049664 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2169446048/tls.crt::/tmp/serving-cert-2169446048/tls.key\\\\\\\"\\\\nI1213 03:12:18.331471 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1213 03:12:18.333517 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1213 03:12:18.333571 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1213 03:12:18.333621 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1213 03:12:18.333650 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1213 03:12:18.339654 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1213 03:12:18.339723 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 03:12:18.339796 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 03:12:18.339862 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1213 03:12:18.339884 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1213 03:12:18.340001 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1213 03:12:18.340065 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1213 03:12:18.339662 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1213 03:12:18.345582 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T03:12:17Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4317a72f422d158e2d22658fd3601f8f2da155e957551144bc153bcaca1f4673\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T03:11:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6fe1de36954b63d9bc0b059085237ea841127d94254ef6b5a2a7418077c3e367\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6fe1de36954b63d9bc0b059085237ea841127d94254ef6b5a2a7418077c3e367\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T03:11:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T03:11:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T03:11:49Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T03:12:21Z is after 2025-08-24T17:21:41Z" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.222686 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.222717 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.222729 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.222746 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.222758 5070 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T03:12:21Z","lastTransitionTime":"2025-12-13T03:12:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.227360 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/8298b7de-21e2-4cb5-b4b4-0f556d87b792-hosts-file\") pod \"node-resolver-cvqqv\" (UID: \"8298b7de-21e2-4cb5-b4b4-0f556d87b792\") " pod="openshift-dns/node-resolver-cvqqv" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.227407 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/49eb1fc8-400f-470b-8664-39fd6c652542-serviceca\") pod \"node-ca-9l8nl\" (UID: \"49eb1fc8-400f-470b-8664-39fd6c652542\") " pod="openshift-image-registry/node-ca-9l8nl" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.227427 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wfp2x\" (UniqueName: \"kubernetes.io/projected/49eb1fc8-400f-470b-8664-39fd6c652542-kube-api-access-wfp2x\") pod \"node-ca-9l8nl\" (UID: \"49eb1fc8-400f-470b-8664-39fd6c652542\") " pod="openshift-image-registry/node-ca-9l8nl" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.227494 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/49eb1fc8-400f-470b-8664-39fd6c652542-host\") pod \"node-ca-9l8nl\" (UID: \"49eb1fc8-400f-470b-8664-39fd6c652542\") " pod="openshift-image-registry/node-ca-9l8nl" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.227535 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b25nv\" (UniqueName: \"kubernetes.io/projected/8298b7de-21e2-4cb5-b4b4-0f556d87b792-kube-api-access-b25nv\") pod \"node-resolver-cvqqv\" (UID: \"8298b7de-21e2-4cb5-b4b4-0f556d87b792\") " pod="openshift-dns/node-resolver-cvqqv" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.227863 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/8298b7de-21e2-4cb5-b4b4-0f556d87b792-hosts-file\") pod \"node-resolver-cvqqv\" (UID: \"8298b7de-21e2-4cb5-b4b4-0f556d87b792\") " pod="openshift-dns/node-resolver-cvqqv" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.228465 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/49eb1fc8-400f-470b-8664-39fd6c652542-host\") pod \"node-ca-9l8nl\" (UID: \"49eb1fc8-400f-470b-8664-39fd6c652542\") " pod="openshift-image-registry/node-ca-9l8nl" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.228866 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/49eb1fc8-400f-470b-8664-39fd6c652542-serviceca\") pod \"node-ca-9l8nl\" (UID: \"49eb1fc8-400f-470b-8664-39fd6c652542\") " pod="openshift-image-registry/node-ca-9l8nl" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.265626 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wfp2x\" (UniqueName: \"kubernetes.io/projected/49eb1fc8-400f-470b-8664-39fd6c652542-kube-api-access-wfp2x\") pod \"node-ca-9l8nl\" (UID: \"49eb1fc8-400f-470b-8664-39fd6c652542\") " pod="openshift-image-registry/node-ca-9l8nl" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.272492 5070 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b25nv\" (UniqueName: \"kubernetes.io/projected/8298b7de-21e2-4cb5-b4b4-0f556d87b792-kube-api-access-b25nv\") pod \"node-resolver-cvqqv\" (UID: \"8298b7de-21e2-4cb5-b4b4-0f556d87b792\") " pod="openshift-dns/node-resolver-cvqqv" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.289481 5070 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1d28ac43-3a1f-4f2b-aba8-0177f11d672f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T03:11:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T03:11:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T03:11:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a598d49ebe10cda296cbb779c7bd5cf606e827af564ac0ae813eb7ab0cfca9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T03:11:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32c7d4f0ad60bf4cac9e70945fd023c2bbe79250fac7118c090d735ad2965c6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T03:11:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://43350ca69ace891078b304f722e18e3bb897227db032f20bc060516caebf72fe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\
"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T03:11:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae3ccc8e4e5fc8cf1bdd87599e40edb02236a05c8698aa097a4181020f60da1a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T03:11:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T03:11:49Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T03:12:21Z is after 2025-08-24T17:21:41Z" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.318453 5070 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-9l8nl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"49eb1fc8-400f-470b-8664-39fd6c652542\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:21Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:21Z\\\",\\\"message\\\":\\\"containers with unready status: 
[node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wfp2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T03:12:21Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-9l8nl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T03:12:21Z is after 2025-08-24T17:21:41Z" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.328145 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.328201 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.328213 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.328239 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.328254 5070 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T03:12:21Z","lastTransitionTime":"2025-12-13T03:12:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.379068 5070 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cvqqv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8298b7de-21e2-4cb5-b4b4-0f556d87b792\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:21Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:21Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b25nv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T03:12:21Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cvqqv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T03:12:21Z is after 2025-08-24T17:21:41Z" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.380169 5070 scope.go:117] "RemoveContainer" containerID="bee949adc33cab61eb1d70c6cecda6bbbf42616b431d2af90fcf71854407da41" Dec 13 03:12:21 crc kubenswrapper[5070]: E1213 03:12:21.380300 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.393147 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/node-resolver-cvqqv" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.396319 5070 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:19Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T03:12:21Z is after 2025-08-24T17:21:41Z" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.399173 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-9l8nl" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.426810 5070 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T03:12:19Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T03:12:21Z is after 2025-08-24T17:21:41Z" Dec 13 03:12:21 crc kubenswrapper[5070]: W1213 03:12:21.429692 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod49eb1fc8_400f_470b_8664_39fd6c652542.slice/crio-a27fd9e61c0f9136e19175e986541167a65e48c859eb61471d58da0f042da762 WatchSource:0}: Error finding container a27fd9e61c0f9136e19175e986541167a65e48c859eb61471d58da0f042da762: Status 404 returned error can't find the container with id a27fd9e61c0f9136e19175e986541167a65e48c859eb61471d58da0f042da762 Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.430583 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.430609 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.430618 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:12:21 crc kubenswrapper[5070]: 
I1213 03:12:21.430632 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.430642 5070 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T03:12:21Z","lastTransitionTime":"2025-12-13T03:12:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.539198 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.539239 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.539252 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.539270 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.539282 5070 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T03:12:21Z","lastTransitionTime":"2025-12-13T03:12:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.552891 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=2.552859937 podStartE2EDuration="2.552859937s" podCreationTimestamp="2025-12-13 03:12:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 03:12:21.552814796 +0000 UTC m=+33.788658352" watchObservedRunningTime="2025-12-13 03:12:21.552859937 +0000 UTC m=+33.788703483" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.581504 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-9l4rb"] Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.588591 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-c4c69"] Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.588754 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-2wg5p"] Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.589543 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-c4c69" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.589972 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-7pmp8"] Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.590020 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.590741 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-7pmp8" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.591012 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-2wg5p" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.596142 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.596322 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.596426 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.596653 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.597031 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.597180 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.597261 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.597361 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.597423 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.597480 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.600244 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.600336 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.600493 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.600257 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.600654 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.600730 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.600829 5070 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-machine-config-operator"/"kube-rbac-proxy" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.600850 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.601158 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.633770 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/a2e447c7-5901-414f-af96-69441d4750db-proxy-tls\") pod \"machine-config-daemon-9l4rb\" (UID: \"a2e447c7-5901-414f-af96-69441d4750db\") " pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.633817 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/65d45c3c-cac7-4578-b0b5-05e546d8e356-run-openvswitch\") pod \"ovnkube-node-7pmp8\" (UID: \"65d45c3c-cac7-4578-b0b5-05e546d8e356\") " pod="openshift-ovn-kubernetes/ovnkube-node-7pmp8" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.633839 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/65d45c3c-cac7-4578-b0b5-05e546d8e356-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-7pmp8\" (UID: \"65d45c3c-cac7-4578-b0b5-05e546d8e356\") " pod="openshift-ovn-kubernetes/ovnkube-node-7pmp8" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.633861 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/65d45c3c-cac7-4578-b0b5-05e546d8e356-ovnkube-script-lib\") pod \"ovnkube-node-7pmp8\" (UID: \"65d45c3c-cac7-4578-b0b5-05e546d8e356\") " pod="openshift-ovn-kubernetes/ovnkube-node-7pmp8" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.634246 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/7ecb3a4a-4966-4cd1-bf07-aec91cf4212e-multus-conf-dir\") pod \"multus-c4c69\" (UID: \"7ecb3a4a-4966-4cd1-bf07-aec91cf4212e\") " pod="openshift-multus/multus-c4c69" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.634296 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/7ecb3a4a-4966-4cd1-bf07-aec91cf4212e-host-run-netns\") pod \"multus-c4c69\" (UID: \"7ecb3a4a-4966-4cd1-bf07-aec91cf4212e\") " pod="openshift-multus/multus-c4c69" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.634316 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/7ecb3a4a-4966-4cd1-bf07-aec91cf4212e-multus-daemon-config\") pod \"multus-c4c69\" (UID: \"7ecb3a4a-4966-4cd1-bf07-aec91cf4212e\") " pod="openshift-multus/multus-c4c69" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.634338 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/65d45c3c-cac7-4578-b0b5-05e546d8e356-systemd-units\") pod 
\"ovnkube-node-7pmp8\" (UID: \"65d45c3c-cac7-4578-b0b5-05e546d8e356\") " pod="openshift-ovn-kubernetes/ovnkube-node-7pmp8" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.634362 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/65d45c3c-cac7-4578-b0b5-05e546d8e356-etc-openvswitch\") pod \"ovnkube-node-7pmp8\" (UID: \"65d45c3c-cac7-4578-b0b5-05e546d8e356\") " pod="openshift-ovn-kubernetes/ovnkube-node-7pmp8" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.634380 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/65d45c3c-cac7-4578-b0b5-05e546d8e356-host-cni-bin\") pod \"ovnkube-node-7pmp8\" (UID: \"65d45c3c-cac7-4578-b0b5-05e546d8e356\") " pod="openshift-ovn-kubernetes/ovnkube-node-7pmp8" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.634401 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/65d45c3c-cac7-4578-b0b5-05e546d8e356-ovnkube-config\") pod \"ovnkube-node-7pmp8\" (UID: \"65d45c3c-cac7-4578-b0b5-05e546d8e356\") " pod="openshift-ovn-kubernetes/ovnkube-node-7pmp8" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.634422 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/7ecb3a4a-4966-4cd1-bf07-aec91cf4212e-host-run-multus-certs\") pod \"multus-c4c69\" (UID: \"7ecb3a4a-4966-4cd1-bf07-aec91cf4212e\") " pod="openshift-multus/multus-c4c69" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.634467 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/a79c51b6-42c7-41c3-9f2c-39a9a88e54a4-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-2wg5p\" (UID: \"a79c51b6-42c7-41c3-9f2c-39a9a88e54a4\") " pod="openshift-multus/multus-additional-cni-plugins-2wg5p" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.634501 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zhvc2\" (UniqueName: \"kubernetes.io/projected/7ecb3a4a-4966-4cd1-bf07-aec91cf4212e-kube-api-access-zhvc2\") pod \"multus-c4c69\" (UID: \"7ecb3a4a-4966-4cd1-bf07-aec91cf4212e\") " pod="openshift-multus/multus-c4c69" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.634528 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/a79c51b6-42c7-41c3-9f2c-39a9a88e54a4-cnibin\") pod \"multus-additional-cni-plugins-2wg5p\" (UID: \"a79c51b6-42c7-41c3-9f2c-39a9a88e54a4\") " pod="openshift-multus/multus-additional-cni-plugins-2wg5p" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.634547 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/a79c51b6-42c7-41c3-9f2c-39a9a88e54a4-cni-binary-copy\") pod \"multus-additional-cni-plugins-2wg5p\" (UID: \"a79c51b6-42c7-41c3-9f2c-39a9a88e54a4\") " pod="openshift-multus/multus-additional-cni-plugins-2wg5p" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.634562 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"kube-api-access-bmrh9\" (UniqueName: \"kubernetes.io/projected/a2e447c7-5901-414f-af96-69441d4750db-kube-api-access-bmrh9\") pod \"machine-config-daemon-9l4rb\" (UID: \"a2e447c7-5901-414f-af96-69441d4750db\") " pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.634578 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/a2e447c7-5901-414f-af96-69441d4750db-mcd-auth-proxy-config\") pod \"machine-config-daemon-9l4rb\" (UID: \"a2e447c7-5901-414f-af96-69441d4750db\") " pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.634664 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/65d45c3c-cac7-4578-b0b5-05e546d8e356-host-run-netns\") pod \"ovnkube-node-7pmp8\" (UID: \"65d45c3c-cac7-4578-b0b5-05e546d8e356\") " pod="openshift-ovn-kubernetes/ovnkube-node-7pmp8" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.634703 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/65d45c3c-cac7-4578-b0b5-05e546d8e356-run-systemd\") pod \"ovnkube-node-7pmp8\" (UID: \"65d45c3c-cac7-4578-b0b5-05e546d8e356\") " pod="openshift-ovn-kubernetes/ovnkube-node-7pmp8" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.634723 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/65d45c3c-cac7-4578-b0b5-05e546d8e356-node-log\") pod \"ovnkube-node-7pmp8\" (UID: \"65d45c3c-cac7-4578-b0b5-05e546d8e356\") " pod="openshift-ovn-kubernetes/ovnkube-node-7pmp8" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.634741 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/65d45c3c-cac7-4578-b0b5-05e546d8e356-run-ovn\") pod \"ovnkube-node-7pmp8\" (UID: \"65d45c3c-cac7-4578-b0b5-05e546d8e356\") " pod="openshift-ovn-kubernetes/ovnkube-node-7pmp8" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.634759 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/65d45c3c-cac7-4578-b0b5-05e546d8e356-log-socket\") pod \"ovnkube-node-7pmp8\" (UID: \"65d45c3c-cac7-4578-b0b5-05e546d8e356\") " pod="openshift-ovn-kubernetes/ovnkube-node-7pmp8" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.634779 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/65d45c3c-cac7-4578-b0b5-05e546d8e356-host-kubelet\") pod \"ovnkube-node-7pmp8\" (UID: \"65d45c3c-cac7-4578-b0b5-05e546d8e356\") " pod="openshift-ovn-kubernetes/ovnkube-node-7pmp8" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.634796 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/a79c51b6-42c7-41c3-9f2c-39a9a88e54a4-os-release\") pod \"multus-additional-cni-plugins-2wg5p\" (UID: \"a79c51b6-42c7-41c3-9f2c-39a9a88e54a4\") " pod="openshift-multus/multus-additional-cni-plugins-2wg5p" Dec 13 03:12:21 
crc kubenswrapper[5070]: I1213 03:12:21.634812 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mmh8w\" (UniqueName: \"kubernetes.io/projected/a79c51b6-42c7-41c3-9f2c-39a9a88e54a4-kube-api-access-mmh8w\") pod \"multus-additional-cni-plugins-2wg5p\" (UID: \"a79c51b6-42c7-41c3-9f2c-39a9a88e54a4\") " pod="openshift-multus/multus-additional-cni-plugins-2wg5p" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.634829 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/7ecb3a4a-4966-4cd1-bf07-aec91cf4212e-host-var-lib-kubelet\") pod \"multus-c4c69\" (UID: \"7ecb3a4a-4966-4cd1-bf07-aec91cf4212e\") " pod="openshift-multus/multus-c4c69" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.634843 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/65d45c3c-cac7-4578-b0b5-05e546d8e356-host-cni-netd\") pod \"ovnkube-node-7pmp8\" (UID: \"65d45c3c-cac7-4578-b0b5-05e546d8e356\") " pod="openshift-ovn-kubernetes/ovnkube-node-7pmp8" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.634858 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/7ecb3a4a-4966-4cd1-bf07-aec91cf4212e-multus-socket-dir-parent\") pod \"multus-c4c69\" (UID: \"7ecb3a4a-4966-4cd1-bf07-aec91cf4212e\") " pod="openshift-multus/multus-c4c69" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.634872 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/7ecb3a4a-4966-4cd1-bf07-aec91cf4212e-etc-kubernetes\") pod \"multus-c4c69\" (UID: \"7ecb3a4a-4966-4cd1-bf07-aec91cf4212e\") " pod="openshift-multus/multus-c4c69" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.634887 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/7ecb3a4a-4966-4cd1-bf07-aec91cf4212e-hostroot\") pod \"multus-c4c69\" (UID: \"7ecb3a4a-4966-4cd1-bf07-aec91cf4212e\") " pod="openshift-multus/multus-c4c69" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.634925 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/a79c51b6-42c7-41c3-9f2c-39a9a88e54a4-system-cni-dir\") pod \"multus-additional-cni-plugins-2wg5p\" (UID: \"a79c51b6-42c7-41c3-9f2c-39a9a88e54a4\") " pod="openshift-multus/multus-additional-cni-plugins-2wg5p" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.634948 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/a79c51b6-42c7-41c3-9f2c-39a9a88e54a4-tuning-conf-dir\") pod \"multus-additional-cni-plugins-2wg5p\" (UID: \"a79c51b6-42c7-41c3-9f2c-39a9a88e54a4\") " pod="openshift-multus/multus-additional-cni-plugins-2wg5p" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.634970 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/7ecb3a4a-4966-4cd1-bf07-aec91cf4212e-system-cni-dir\") pod \"multus-c4c69\" (UID: 
\"7ecb3a4a-4966-4cd1-bf07-aec91cf4212e\") " pod="openshift-multus/multus-c4c69" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.634989 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/7ecb3a4a-4966-4cd1-bf07-aec91cf4212e-host-var-lib-cni-bin\") pod \"multus-c4c69\" (UID: \"7ecb3a4a-4966-4cd1-bf07-aec91cf4212e\") " pod="openshift-multus/multus-c4c69" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.635011 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/65d45c3c-cac7-4578-b0b5-05e546d8e356-env-overrides\") pod \"ovnkube-node-7pmp8\" (UID: \"65d45c3c-cac7-4578-b0b5-05e546d8e356\") " pod="openshift-ovn-kubernetes/ovnkube-node-7pmp8" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.635032 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/7ecb3a4a-4966-4cd1-bf07-aec91cf4212e-cnibin\") pod \"multus-c4c69\" (UID: \"7ecb3a4a-4966-4cd1-bf07-aec91cf4212e\") " pod="openshift-multus/multus-c4c69" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.635052 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7ecb3a4a-4966-4cd1-bf07-aec91cf4212e-cni-binary-copy\") pod \"multus-c4c69\" (UID: \"7ecb3a4a-4966-4cd1-bf07-aec91cf4212e\") " pod="openshift-multus/multus-c4c69" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.635072 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/7ecb3a4a-4966-4cd1-bf07-aec91cf4212e-host-var-lib-cni-multus\") pod \"multus-c4c69\" (UID: \"7ecb3a4a-4966-4cd1-bf07-aec91cf4212e\") " pod="openshift-multus/multus-c4c69" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.635100 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/65d45c3c-cac7-4578-b0b5-05e546d8e356-ovn-node-metrics-cert\") pod \"ovnkube-node-7pmp8\" (UID: \"65d45c3c-cac7-4578-b0b5-05e546d8e356\") " pod="openshift-ovn-kubernetes/ovnkube-node-7pmp8" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.635122 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/7ecb3a4a-4966-4cd1-bf07-aec91cf4212e-multus-cni-dir\") pod \"multus-c4c69\" (UID: \"7ecb3a4a-4966-4cd1-bf07-aec91cf4212e\") " pod="openshift-multus/multus-c4c69" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.635142 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/7ecb3a4a-4966-4cd1-bf07-aec91cf4212e-host-run-k8s-cni-cncf-io\") pod \"multus-c4c69\" (UID: \"7ecb3a4a-4966-4cd1-bf07-aec91cf4212e\") " pod="openshift-multus/multus-c4c69" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.635165 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/a2e447c7-5901-414f-af96-69441d4750db-rootfs\") pod \"machine-config-daemon-9l4rb\" (UID: 
\"a2e447c7-5901-414f-af96-69441d4750db\") " pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.635194 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/65d45c3c-cac7-4578-b0b5-05e546d8e356-host-slash\") pod \"ovnkube-node-7pmp8\" (UID: \"65d45c3c-cac7-4578-b0b5-05e546d8e356\") " pod="openshift-ovn-kubernetes/ovnkube-node-7pmp8" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.635211 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/65d45c3c-cac7-4578-b0b5-05e546d8e356-var-lib-openvswitch\") pod \"ovnkube-node-7pmp8\" (UID: \"65d45c3c-cac7-4578-b0b5-05e546d8e356\") " pod="openshift-ovn-kubernetes/ovnkube-node-7pmp8" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.635232 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/65d45c3c-cac7-4578-b0b5-05e546d8e356-host-run-ovn-kubernetes\") pod \"ovnkube-node-7pmp8\" (UID: \"65d45c3c-cac7-4578-b0b5-05e546d8e356\") " pod="openshift-ovn-kubernetes/ovnkube-node-7pmp8" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.635252 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fnwm2\" (UniqueName: \"kubernetes.io/projected/65d45c3c-cac7-4578-b0b5-05e546d8e356-kube-api-access-fnwm2\") pod \"ovnkube-node-7pmp8\" (UID: \"65d45c3c-cac7-4578-b0b5-05e546d8e356\") " pod="openshift-ovn-kubernetes/ovnkube-node-7pmp8" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.635270 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/7ecb3a4a-4966-4cd1-bf07-aec91cf4212e-os-release\") pod \"multus-c4c69\" (UID: \"7ecb3a4a-4966-4cd1-bf07-aec91cf4212e\") " pod="openshift-multus/multus-c4c69" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.643418 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.643663 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.643760 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.643849 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.643948 5070 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T03:12:21Z","lastTransitionTime":"2025-12-13T03:12:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.736565 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/a79c51b6-42c7-41c3-9f2c-39a9a88e54a4-cni-binary-copy\") pod \"multus-additional-cni-plugins-2wg5p\" (UID: \"a79c51b6-42c7-41c3-9f2c-39a9a88e54a4\") " pod="openshift-multus/multus-additional-cni-plugins-2wg5p" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.736604 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/a79c51b6-42c7-41c3-9f2c-39a9a88e54a4-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-2wg5p\" (UID: \"a79c51b6-42c7-41c3-9f2c-39a9a88e54a4\") " pod="openshift-multus/multus-additional-cni-plugins-2wg5p" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.736634 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zhvc2\" (UniqueName: \"kubernetes.io/projected/7ecb3a4a-4966-4cd1-bf07-aec91cf4212e-kube-api-access-zhvc2\") pod \"multus-c4c69\" (UID: \"7ecb3a4a-4966-4cd1-bf07-aec91cf4212e\") " pod="openshift-multus/multus-c4c69" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.736654 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/a79c51b6-42c7-41c3-9f2c-39a9a88e54a4-cnibin\") pod \"multus-additional-cni-plugins-2wg5p\" (UID: \"a79c51b6-42c7-41c3-9f2c-39a9a88e54a4\") " pod="openshift-multus/multus-additional-cni-plugins-2wg5p" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.736671 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bmrh9\" (UniqueName: \"kubernetes.io/projected/a2e447c7-5901-414f-af96-69441d4750db-kube-api-access-bmrh9\") pod \"machine-config-daemon-9l4rb\" (UID: \"a2e447c7-5901-414f-af96-69441d4750db\") " pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.736687 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/a2e447c7-5901-414f-af96-69441d4750db-mcd-auth-proxy-config\") pod \"machine-config-daemon-9l4rb\" (UID: \"a2e447c7-5901-414f-af96-69441d4750db\") " pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.736704 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/65d45c3c-cac7-4578-b0b5-05e546d8e356-node-log\") pod \"ovnkube-node-7pmp8\" (UID: \"65d45c3c-cac7-4578-b0b5-05e546d8e356\") " pod="openshift-ovn-kubernetes/ovnkube-node-7pmp8" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.736727 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/65d45c3c-cac7-4578-b0b5-05e546d8e356-host-run-netns\") pod \"ovnkube-node-7pmp8\" (UID: \"65d45c3c-cac7-4578-b0b5-05e546d8e356\") " pod="openshift-ovn-kubernetes/ovnkube-node-7pmp8" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.736741 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/65d45c3c-cac7-4578-b0b5-05e546d8e356-run-systemd\") pod \"ovnkube-node-7pmp8\" (UID: 
\"65d45c3c-cac7-4578-b0b5-05e546d8e356\") " pod="openshift-ovn-kubernetes/ovnkube-node-7pmp8" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.736759 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/65d45c3c-cac7-4578-b0b5-05e546d8e356-run-ovn\") pod \"ovnkube-node-7pmp8\" (UID: \"65d45c3c-cac7-4578-b0b5-05e546d8e356\") " pod="openshift-ovn-kubernetes/ovnkube-node-7pmp8" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.736774 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/65d45c3c-cac7-4578-b0b5-05e546d8e356-log-socket\") pod \"ovnkube-node-7pmp8\" (UID: \"65d45c3c-cac7-4578-b0b5-05e546d8e356\") " pod="openshift-ovn-kubernetes/ovnkube-node-7pmp8" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.736789 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/7ecb3a4a-4966-4cd1-bf07-aec91cf4212e-host-var-lib-kubelet\") pod \"multus-c4c69\" (UID: \"7ecb3a4a-4966-4cd1-bf07-aec91cf4212e\") " pod="openshift-multus/multus-c4c69" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.736802 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/65d45c3c-cac7-4578-b0b5-05e546d8e356-host-kubelet\") pod \"ovnkube-node-7pmp8\" (UID: \"65d45c3c-cac7-4578-b0b5-05e546d8e356\") " pod="openshift-ovn-kubernetes/ovnkube-node-7pmp8" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.736815 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/a79c51b6-42c7-41c3-9f2c-39a9a88e54a4-os-release\") pod \"multus-additional-cni-plugins-2wg5p\" (UID: \"a79c51b6-42c7-41c3-9f2c-39a9a88e54a4\") " pod="openshift-multus/multus-additional-cni-plugins-2wg5p" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.736831 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mmh8w\" (UniqueName: \"kubernetes.io/projected/a79c51b6-42c7-41c3-9f2c-39a9a88e54a4-kube-api-access-mmh8w\") pod \"multus-additional-cni-plugins-2wg5p\" (UID: \"a79c51b6-42c7-41c3-9f2c-39a9a88e54a4\") " pod="openshift-multus/multus-additional-cni-plugins-2wg5p" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.736848 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/65d45c3c-cac7-4578-b0b5-05e546d8e356-host-cni-netd\") pod \"ovnkube-node-7pmp8\" (UID: \"65d45c3c-cac7-4578-b0b5-05e546d8e356\") " pod="openshift-ovn-kubernetes/ovnkube-node-7pmp8" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.736862 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/7ecb3a4a-4966-4cd1-bf07-aec91cf4212e-multus-socket-dir-parent\") pod \"multus-c4c69\" (UID: \"7ecb3a4a-4966-4cd1-bf07-aec91cf4212e\") " pod="openshift-multus/multus-c4c69" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.736877 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/7ecb3a4a-4966-4cd1-bf07-aec91cf4212e-etc-kubernetes\") pod \"multus-c4c69\" (UID: \"7ecb3a4a-4966-4cd1-bf07-aec91cf4212e\") " pod="openshift-multus/multus-c4c69" 
Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.736891 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/7ecb3a4a-4966-4cd1-bf07-aec91cf4212e-host-var-lib-cni-bin\") pod \"multus-c4c69\" (UID: \"7ecb3a4a-4966-4cd1-bf07-aec91cf4212e\") " pod="openshift-multus/multus-c4c69" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.736906 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/7ecb3a4a-4966-4cd1-bf07-aec91cf4212e-hostroot\") pod \"multus-c4c69\" (UID: \"7ecb3a4a-4966-4cd1-bf07-aec91cf4212e\") " pod="openshift-multus/multus-c4c69" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.736934 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/a79c51b6-42c7-41c3-9f2c-39a9a88e54a4-system-cni-dir\") pod \"multus-additional-cni-plugins-2wg5p\" (UID: \"a79c51b6-42c7-41c3-9f2c-39a9a88e54a4\") " pod="openshift-multus/multus-additional-cni-plugins-2wg5p" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.736951 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/a79c51b6-42c7-41c3-9f2c-39a9a88e54a4-tuning-conf-dir\") pod \"multus-additional-cni-plugins-2wg5p\" (UID: \"a79c51b6-42c7-41c3-9f2c-39a9a88e54a4\") " pod="openshift-multus/multus-additional-cni-plugins-2wg5p" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.736967 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/7ecb3a4a-4966-4cd1-bf07-aec91cf4212e-system-cni-dir\") pod \"multus-c4c69\" (UID: \"7ecb3a4a-4966-4cd1-bf07-aec91cf4212e\") " pod="openshift-multus/multus-c4c69" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.736982 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/65d45c3c-cac7-4578-b0b5-05e546d8e356-env-overrides\") pod \"ovnkube-node-7pmp8\" (UID: \"65d45c3c-cac7-4578-b0b5-05e546d8e356\") " pod="openshift-ovn-kubernetes/ovnkube-node-7pmp8" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.736999 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/7ecb3a4a-4966-4cd1-bf07-aec91cf4212e-cnibin\") pod \"multus-c4c69\" (UID: \"7ecb3a4a-4966-4cd1-bf07-aec91cf4212e\") " pod="openshift-multus/multus-c4c69" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.737018 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7ecb3a4a-4966-4cd1-bf07-aec91cf4212e-cni-binary-copy\") pod \"multus-c4c69\" (UID: \"7ecb3a4a-4966-4cd1-bf07-aec91cf4212e\") " pod="openshift-multus/multus-c4c69" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.737034 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/7ecb3a4a-4966-4cd1-bf07-aec91cf4212e-host-var-lib-cni-multus\") pod \"multus-c4c69\" (UID: \"7ecb3a4a-4966-4cd1-bf07-aec91cf4212e\") " pod="openshift-multus/multus-c4c69" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.737052 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" 
(UniqueName: \"kubernetes.io/host-path/a2e447c7-5901-414f-af96-69441d4750db-rootfs\") pod \"machine-config-daemon-9l4rb\" (UID: \"a2e447c7-5901-414f-af96-69441d4750db\") " pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.737092 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/65d45c3c-cac7-4578-b0b5-05e546d8e356-ovn-node-metrics-cert\") pod \"ovnkube-node-7pmp8\" (UID: \"65d45c3c-cac7-4578-b0b5-05e546d8e356\") " pod="openshift-ovn-kubernetes/ovnkube-node-7pmp8" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.737107 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/7ecb3a4a-4966-4cd1-bf07-aec91cf4212e-multus-cni-dir\") pod \"multus-c4c69\" (UID: \"7ecb3a4a-4966-4cd1-bf07-aec91cf4212e\") " pod="openshift-multus/multus-c4c69" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.737121 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/7ecb3a4a-4966-4cd1-bf07-aec91cf4212e-host-run-k8s-cni-cncf-io\") pod \"multus-c4c69\" (UID: \"7ecb3a4a-4966-4cd1-bf07-aec91cf4212e\") " pod="openshift-multus/multus-c4c69" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.737149 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/65d45c3c-cac7-4578-b0b5-05e546d8e356-host-slash\") pod \"ovnkube-node-7pmp8\" (UID: \"65d45c3c-cac7-4578-b0b5-05e546d8e356\") " pod="openshift-ovn-kubernetes/ovnkube-node-7pmp8" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.737165 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/65d45c3c-cac7-4578-b0b5-05e546d8e356-var-lib-openvswitch\") pod \"ovnkube-node-7pmp8\" (UID: \"65d45c3c-cac7-4578-b0b5-05e546d8e356\") " pod="openshift-ovn-kubernetes/ovnkube-node-7pmp8" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.737182 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/65d45c3c-cac7-4578-b0b5-05e546d8e356-host-run-ovn-kubernetes\") pod \"ovnkube-node-7pmp8\" (UID: \"65d45c3c-cac7-4578-b0b5-05e546d8e356\") " pod="openshift-ovn-kubernetes/ovnkube-node-7pmp8" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.737197 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fnwm2\" (UniqueName: \"kubernetes.io/projected/65d45c3c-cac7-4578-b0b5-05e546d8e356-kube-api-access-fnwm2\") pod \"ovnkube-node-7pmp8\" (UID: \"65d45c3c-cac7-4578-b0b5-05e546d8e356\") " pod="openshift-ovn-kubernetes/ovnkube-node-7pmp8" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.737213 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/7ecb3a4a-4966-4cd1-bf07-aec91cf4212e-os-release\") pod \"multus-c4c69\" (UID: \"7ecb3a4a-4966-4cd1-bf07-aec91cf4212e\") " pod="openshift-multus/multus-c4c69" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.737230 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: 
\"kubernetes.io/host-path/7ecb3a4a-4966-4cd1-bf07-aec91cf4212e-multus-conf-dir\") pod \"multus-c4c69\" (UID: \"7ecb3a4a-4966-4cd1-bf07-aec91cf4212e\") " pod="openshift-multus/multus-c4c69" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.737250 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/a2e447c7-5901-414f-af96-69441d4750db-proxy-tls\") pod \"machine-config-daemon-9l4rb\" (UID: \"a2e447c7-5901-414f-af96-69441d4750db\") " pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.737270 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/65d45c3c-cac7-4578-b0b5-05e546d8e356-run-openvswitch\") pod \"ovnkube-node-7pmp8\" (UID: \"65d45c3c-cac7-4578-b0b5-05e546d8e356\") " pod="openshift-ovn-kubernetes/ovnkube-node-7pmp8" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.737289 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/65d45c3c-cac7-4578-b0b5-05e546d8e356-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-7pmp8\" (UID: \"65d45c3c-cac7-4578-b0b5-05e546d8e356\") " pod="openshift-ovn-kubernetes/ovnkube-node-7pmp8" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.737310 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/65d45c3c-cac7-4578-b0b5-05e546d8e356-ovnkube-script-lib\") pod \"ovnkube-node-7pmp8\" (UID: \"65d45c3c-cac7-4578-b0b5-05e546d8e356\") " pod="openshift-ovn-kubernetes/ovnkube-node-7pmp8" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.737332 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/65d45c3c-cac7-4578-b0b5-05e546d8e356-ovnkube-config\") pod \"ovnkube-node-7pmp8\" (UID: \"65d45c3c-cac7-4578-b0b5-05e546d8e356\") " pod="openshift-ovn-kubernetes/ovnkube-node-7pmp8" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.737334 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/a79c51b6-42c7-41c3-9f2c-39a9a88e54a4-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-2wg5p\" (UID: \"a79c51b6-42c7-41c3-9f2c-39a9a88e54a4\") " pod="openshift-multus/multus-additional-cni-plugins-2wg5p" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.737398 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/7ecb3a4a-4966-4cd1-bf07-aec91cf4212e-host-run-netns\") pod \"multus-c4c69\" (UID: \"7ecb3a4a-4966-4cd1-bf07-aec91cf4212e\") " pod="openshift-multus/multus-c4c69" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.737349 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/7ecb3a4a-4966-4cd1-bf07-aec91cf4212e-host-run-netns\") pod \"multus-c4c69\" (UID: \"7ecb3a4a-4966-4cd1-bf07-aec91cf4212e\") " pod="openshift-multus/multus-c4c69" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.737454 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: 
\"kubernetes.io/configmap/7ecb3a4a-4966-4cd1-bf07-aec91cf4212e-multus-daemon-config\") pod \"multus-c4c69\" (UID: \"7ecb3a4a-4966-4cd1-bf07-aec91cf4212e\") " pod="openshift-multus/multus-c4c69" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.737455 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/a79c51b6-42c7-41c3-9f2c-39a9a88e54a4-cni-binary-copy\") pod \"multus-additional-cni-plugins-2wg5p\" (UID: \"a79c51b6-42c7-41c3-9f2c-39a9a88e54a4\") " pod="openshift-multus/multus-additional-cni-plugins-2wg5p" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.737476 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/65d45c3c-cac7-4578-b0b5-05e546d8e356-systemd-units\") pod \"ovnkube-node-7pmp8\" (UID: \"65d45c3c-cac7-4578-b0b5-05e546d8e356\") " pod="openshift-ovn-kubernetes/ovnkube-node-7pmp8" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.737499 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/65d45c3c-cac7-4578-b0b5-05e546d8e356-systemd-units\") pod \"ovnkube-node-7pmp8\" (UID: \"65d45c3c-cac7-4578-b0b5-05e546d8e356\") " pod="openshift-ovn-kubernetes/ovnkube-node-7pmp8" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.737509 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/65d45c3c-cac7-4578-b0b5-05e546d8e356-etc-openvswitch\") pod \"ovnkube-node-7pmp8\" (UID: \"65d45c3c-cac7-4578-b0b5-05e546d8e356\") " pod="openshift-ovn-kubernetes/ovnkube-node-7pmp8" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.737535 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/65d45c3c-cac7-4578-b0b5-05e546d8e356-host-cni-bin\") pod \"ovnkube-node-7pmp8\" (UID: \"65d45c3c-cac7-4578-b0b5-05e546d8e356\") " pod="openshift-ovn-kubernetes/ovnkube-node-7pmp8" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.737561 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/7ecb3a4a-4966-4cd1-bf07-aec91cf4212e-host-run-multus-certs\") pod \"multus-c4c69\" (UID: \"7ecb3a4a-4966-4cd1-bf07-aec91cf4212e\") " pod="openshift-multus/multus-c4c69" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.737652 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/a79c51b6-42c7-41c3-9f2c-39a9a88e54a4-cnibin\") pod \"multus-additional-cni-plugins-2wg5p\" (UID: \"a79c51b6-42c7-41c3-9f2c-39a9a88e54a4\") " pod="openshift-multus/multus-additional-cni-plugins-2wg5p" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.737660 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/7ecb3a4a-4966-4cd1-bf07-aec91cf4212e-host-run-multus-certs\") pod \"multus-c4c69\" (UID: \"7ecb3a4a-4966-4cd1-bf07-aec91cf4212e\") " pod="openshift-multus/multus-c4c69" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.737701 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/65d45c3c-cac7-4578-b0b5-05e546d8e356-etc-openvswitch\") pod \"ovnkube-node-7pmp8\" (UID: 
\"65d45c3c-cac7-4578-b0b5-05e546d8e356\") " pod="openshift-ovn-kubernetes/ovnkube-node-7pmp8" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.737733 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/65d45c3c-cac7-4578-b0b5-05e546d8e356-host-cni-bin\") pod \"ovnkube-node-7pmp8\" (UID: \"65d45c3c-cac7-4578-b0b5-05e546d8e356\") " pod="openshift-ovn-kubernetes/ovnkube-node-7pmp8" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.737796 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/7ecb3a4a-4966-4cd1-bf07-aec91cf4212e-cnibin\") pod \"multus-c4c69\" (UID: \"7ecb3a4a-4966-4cd1-bf07-aec91cf4212e\") " pod="openshift-multus/multus-c4c69" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.737945 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/7ecb3a4a-4966-4cd1-bf07-aec91cf4212e-multus-daemon-config\") pod \"multus-c4c69\" (UID: \"7ecb3a4a-4966-4cd1-bf07-aec91cf4212e\") " pod="openshift-multus/multus-c4c69" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.738109 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/a79c51b6-42c7-41c3-9f2c-39a9a88e54a4-os-release\") pod \"multus-additional-cni-plugins-2wg5p\" (UID: \"a79c51b6-42c7-41c3-9f2c-39a9a88e54a4\") " pod="openshift-multus/multus-additional-cni-plugins-2wg5p" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.738144 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/65d45c3c-cac7-4578-b0b5-05e546d8e356-host-run-ovn-kubernetes\") pod \"ovnkube-node-7pmp8\" (UID: \"65d45c3c-cac7-4578-b0b5-05e546d8e356\") " pod="openshift-ovn-kubernetes/ovnkube-node-7pmp8" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.738170 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/7ecb3a4a-4966-4cd1-bf07-aec91cf4212e-host-var-lib-cni-multus\") pod \"multus-c4c69\" (UID: \"7ecb3a4a-4966-4cd1-bf07-aec91cf4212e\") " pod="openshift-multus/multus-c4c69" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.738194 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/a2e447c7-5901-414f-af96-69441d4750db-rootfs\") pod \"machine-config-daemon-9l4rb\" (UID: \"a2e447c7-5901-414f-af96-69441d4750db\") " pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.738266 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/65d45c3c-cac7-4578-b0b5-05e546d8e356-env-overrides\") pod \"ovnkube-node-7pmp8\" (UID: \"65d45c3c-cac7-4578-b0b5-05e546d8e356\") " pod="openshift-ovn-kubernetes/ovnkube-node-7pmp8" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.738343 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7ecb3a4a-4966-4cd1-bf07-aec91cf4212e-cni-binary-copy\") pod \"multus-c4c69\" (UID: \"7ecb3a4a-4966-4cd1-bf07-aec91cf4212e\") " pod="openshift-multus/multus-c4c69" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.738527 5070 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/7ecb3a4a-4966-4cd1-bf07-aec91cf4212e-os-release\") pod \"multus-c4c69\" (UID: \"7ecb3a4a-4966-4cd1-bf07-aec91cf4212e\") " pod="openshift-multus/multus-c4c69" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.738554 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/7ecb3a4a-4966-4cd1-bf07-aec91cf4212e-multus-conf-dir\") pod \"multus-c4c69\" (UID: \"7ecb3a4a-4966-4cd1-bf07-aec91cf4212e\") " pod="openshift-multus/multus-c4c69" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.738635 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/65d45c3c-cac7-4578-b0b5-05e546d8e356-host-cni-netd\") pod \"ovnkube-node-7pmp8\" (UID: \"65d45c3c-cac7-4578-b0b5-05e546d8e356\") " pod="openshift-ovn-kubernetes/ovnkube-node-7pmp8" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.739003 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/7ecb3a4a-4966-4cd1-bf07-aec91cf4212e-hostroot\") pod \"multus-c4c69\" (UID: \"7ecb3a4a-4966-4cd1-bf07-aec91cf4212e\") " pod="openshift-multus/multus-c4c69" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.739041 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/a79c51b6-42c7-41c3-9f2c-39a9a88e54a4-system-cni-dir\") pod \"multus-additional-cni-plugins-2wg5p\" (UID: \"a79c51b6-42c7-41c3-9f2c-39a9a88e54a4\") " pod="openshift-multus/multus-additional-cni-plugins-2wg5p" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.738806 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/65d45c3c-cac7-4578-b0b5-05e546d8e356-var-lib-openvswitch\") pod \"ovnkube-node-7pmp8\" (UID: \"65d45c3c-cac7-4578-b0b5-05e546d8e356\") " pod="openshift-ovn-kubernetes/ovnkube-node-7pmp8" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.738828 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/65d45c3c-cac7-4578-b0b5-05e546d8e356-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-7pmp8\" (UID: \"65d45c3c-cac7-4578-b0b5-05e546d8e356\") " pod="openshift-ovn-kubernetes/ovnkube-node-7pmp8" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.738787 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/65d45c3c-cac7-4578-b0b5-05e546d8e356-host-slash\") pod \"ovnkube-node-7pmp8\" (UID: \"65d45c3c-cac7-4578-b0b5-05e546d8e356\") " pod="openshift-ovn-kubernetes/ovnkube-node-7pmp8" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.738864 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/65d45c3c-cac7-4578-b0b5-05e546d8e356-run-ovn\") pod \"ovnkube-node-7pmp8\" (UID: \"65d45c3c-cac7-4578-b0b5-05e546d8e356\") " pod="openshift-ovn-kubernetes/ovnkube-node-7pmp8" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.738881 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/65d45c3c-cac7-4578-b0b5-05e546d8e356-node-log\") pod \"ovnkube-node-7pmp8\" (UID: 
\"65d45c3c-cac7-4578-b0b5-05e546d8e356\") " pod="openshift-ovn-kubernetes/ovnkube-node-7pmp8" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.739125 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/7ecb3a4a-4966-4cd1-bf07-aec91cf4212e-system-cni-dir\") pod \"multus-c4c69\" (UID: \"7ecb3a4a-4966-4cd1-bf07-aec91cf4212e\") " pod="openshift-multus/multus-c4c69" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.739097 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/7ecb3a4a-4966-4cd1-bf07-aec91cf4212e-multus-cni-dir\") pod \"multus-c4c69\" (UID: \"7ecb3a4a-4966-4cd1-bf07-aec91cf4212e\") " pod="openshift-multus/multus-c4c69" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.738949 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/65d45c3c-cac7-4578-b0b5-05e546d8e356-run-systemd\") pod \"ovnkube-node-7pmp8\" (UID: \"65d45c3c-cac7-4578-b0b5-05e546d8e356\") " pod="openshift-ovn-kubernetes/ovnkube-node-7pmp8" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.738965 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/65d45c3c-cac7-4578-b0b5-05e546d8e356-log-socket\") pod \"ovnkube-node-7pmp8\" (UID: \"65d45c3c-cac7-4578-b0b5-05e546d8e356\") " pod="openshift-ovn-kubernetes/ovnkube-node-7pmp8" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.738969 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/7ecb3a4a-4966-4cd1-bf07-aec91cf4212e-host-var-lib-kubelet\") pod \"multus-c4c69\" (UID: \"7ecb3a4a-4966-4cd1-bf07-aec91cf4212e\") " pod="openshift-multus/multus-c4c69" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.738988 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/7ecb3a4a-4966-4cd1-bf07-aec91cf4212e-etc-kubernetes\") pod \"multus-c4c69\" (UID: \"7ecb3a4a-4966-4cd1-bf07-aec91cf4212e\") " pod="openshift-multus/multus-c4c69" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.738990 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/65d45c3c-cac7-4578-b0b5-05e546d8e356-host-kubelet\") pod \"ovnkube-node-7pmp8\" (UID: \"65d45c3c-cac7-4578-b0b5-05e546d8e356\") " pod="openshift-ovn-kubernetes/ovnkube-node-7pmp8" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.738761 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/7ecb3a4a-4966-4cd1-bf07-aec91cf4212e-host-run-k8s-cni-cncf-io\") pod \"multus-c4c69\" (UID: \"7ecb3a4a-4966-4cd1-bf07-aec91cf4212e\") " pod="openshift-multus/multus-c4c69" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.739017 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/7ecb3a4a-4966-4cd1-bf07-aec91cf4212e-multus-socket-dir-parent\") pod \"multus-c4c69\" (UID: \"7ecb3a4a-4966-4cd1-bf07-aec91cf4212e\") " pod="openshift-multus/multus-c4c69" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.739022 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" 
(UniqueName: \"kubernetes.io/host-path/7ecb3a4a-4966-4cd1-bf07-aec91cf4212e-host-var-lib-cni-bin\") pod \"multus-c4c69\" (UID: \"7ecb3a4a-4966-4cd1-bf07-aec91cf4212e\") " pod="openshift-multus/multus-c4c69" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.738845 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/65d45c3c-cac7-4578-b0b5-05e546d8e356-run-openvswitch\") pod \"ovnkube-node-7pmp8\" (UID: \"65d45c3c-cac7-4578-b0b5-05e546d8e356\") " pod="openshift-ovn-kubernetes/ovnkube-node-7pmp8" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.738897 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/65d45c3c-cac7-4578-b0b5-05e546d8e356-host-run-netns\") pod \"ovnkube-node-7pmp8\" (UID: \"65d45c3c-cac7-4578-b0b5-05e546d8e356\") " pod="openshift-ovn-kubernetes/ovnkube-node-7pmp8" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.738912 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/a2e447c7-5901-414f-af96-69441d4750db-mcd-auth-proxy-config\") pod \"machine-config-daemon-9l4rb\" (UID: \"a2e447c7-5901-414f-af96-69441d4750db\") " pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.739465 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/a79c51b6-42c7-41c3-9f2c-39a9a88e54a4-tuning-conf-dir\") pod \"multus-additional-cni-plugins-2wg5p\" (UID: \"a79c51b6-42c7-41c3-9f2c-39a9a88e54a4\") " pod="openshift-multus/multus-additional-cni-plugins-2wg5p" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.739491 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/65d45c3c-cac7-4578-b0b5-05e546d8e356-ovnkube-script-lib\") pod \"ovnkube-node-7pmp8\" (UID: \"65d45c3c-cac7-4578-b0b5-05e546d8e356\") " pod="openshift-ovn-kubernetes/ovnkube-node-7pmp8" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.739837 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/65d45c3c-cac7-4578-b0b5-05e546d8e356-ovnkube-config\") pod \"ovnkube-node-7pmp8\" (UID: \"65d45c3c-cac7-4578-b0b5-05e546d8e356\") " pod="openshift-ovn-kubernetes/ovnkube-node-7pmp8" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.743850 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/a2e447c7-5901-414f-af96-69441d4750db-proxy-tls\") pod \"machine-config-daemon-9l4rb\" (UID: \"a2e447c7-5901-414f-af96-69441d4750db\") " pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.748186 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/65d45c3c-cac7-4578-b0b5-05e546d8e356-ovn-node-metrics-cert\") pod \"ovnkube-node-7pmp8\" (UID: \"65d45c3c-cac7-4578-b0b5-05e546d8e356\") " pod="openshift-ovn-kubernetes/ovnkube-node-7pmp8" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.757871 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 
03:12:21.758103 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.758186 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.758313 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.758386 5070 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T03:12:21Z","lastTransitionTime":"2025-12-13T03:12:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.767785 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mmh8w\" (UniqueName: \"kubernetes.io/projected/a79c51b6-42c7-41c3-9f2c-39a9a88e54a4-kube-api-access-mmh8w\") pod \"multus-additional-cni-plugins-2wg5p\" (UID: \"a79c51b6-42c7-41c3-9f2c-39a9a88e54a4\") " pod="openshift-multus/multus-additional-cni-plugins-2wg5p" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.779880 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fnwm2\" (UniqueName: \"kubernetes.io/projected/65d45c3c-cac7-4578-b0b5-05e546d8e356-kube-api-access-fnwm2\") pod \"ovnkube-node-7pmp8\" (UID: \"65d45c3c-cac7-4578-b0b5-05e546d8e356\") " pod="openshift-ovn-kubernetes/ovnkube-node-7pmp8" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.785900 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bmrh9\" (UniqueName: \"kubernetes.io/projected/a2e447c7-5901-414f-af96-69441d4750db-kube-api-access-bmrh9\") pod \"machine-config-daemon-9l4rb\" (UID: \"a2e447c7-5901-414f-af96-69441d4750db\") " pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.787836 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zhvc2\" (UniqueName: \"kubernetes.io/projected/7ecb3a4a-4966-4cd1-bf07-aec91cf4212e-kube-api-access-zhvc2\") pod \"multus-c4c69\" (UID: \"7ecb3a4a-4966-4cd1-bf07-aec91cf4212e\") " pod="openshift-multus/multus-c4c69" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.861214 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.861260 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.861269 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.861287 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.861300 5070 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T03:12:21Z","lastTransitionTime":"2025-12-13T03:12:21Z","reason":"KubeletNotReady","message":"container runtime network not 
ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.909570 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-c4c69" Dec 13 03:12:21 crc kubenswrapper[5070]: W1213 03:12:21.922003 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7ecb3a4a_4966_4cd1_bf07_aec91cf4212e.slice/crio-8a8ce53f6b64fbfcc742bcd29f46da4d6075511b8bdf721f95d101c76734c47f WatchSource:0}: Error finding container 8a8ce53f6b64fbfcc742bcd29f46da4d6075511b8bdf721f95d101c76734c47f: Status 404 returned error can't find the container with id 8a8ce53f6b64fbfcc742bcd29f46da4d6075511b8bdf721f95d101c76734c47f Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.941908 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.949931 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-7pmp8" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.962237 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-2wg5p" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.967621 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.967652 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.967661 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.967675 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 03:12:21 crc kubenswrapper[5070]: I1213 03:12:21.967685 5070 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T03:12:21Z","lastTransitionTime":"2025-12-13T03:12:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 03:12:21 crc kubenswrapper[5070]: W1213 03:12:21.973343 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod65d45c3c_cac7_4578_b0b5_05e546d8e356.slice/crio-b0410c0d7afa2272cf891cf4ca13aeb146961247c1674a992bb8963207013d18 WatchSource:0}: Error finding container b0410c0d7afa2272cf891cf4ca13aeb146961247c1674a992bb8963207013d18: Status 404 returned error can't find the container with id b0410c0d7afa2272cf891cf4ca13aeb146961247c1674a992bb8963207013d18 Dec 13 03:12:22 crc kubenswrapper[5070]: I1213 03:12:22.070219 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:12:22 crc kubenswrapper[5070]: I1213 03:12:22.070249 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:12:22 crc kubenswrapper[5070]: I1213 03:12:22.070258 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:12:22 crc kubenswrapper[5070]: I1213 03:12:22.070271 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 03:12:22 crc kubenswrapper[5070]: I1213 03:12:22.070280 5070 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T03:12:22Z","lastTransitionTime":"2025-12-13T03:12:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 03:12:22 crc kubenswrapper[5070]: I1213 03:12:22.175575 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:12:22 crc kubenswrapper[5070]: I1213 03:12:22.175620 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:12:22 crc kubenswrapper[5070]: I1213 03:12:22.175631 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:12:22 crc kubenswrapper[5070]: I1213 03:12:22.175651 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 03:12:22 crc kubenswrapper[5070]: I1213 03:12:22.175662 5070 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T03:12:22Z","lastTransitionTime":"2025-12-13T03:12:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 03:12:22 crc kubenswrapper[5070]: I1213 03:12:22.249420 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nhg2r"] Dec 13 03:12:22 crc kubenswrapper[5070]: I1213 03:12:22.249960 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nhg2r" Dec 13 03:12:22 crc kubenswrapper[5070]: I1213 03:12:22.252479 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Dec 13 03:12:22 crc kubenswrapper[5070]: I1213 03:12:22.252823 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Dec 13 03:12:22 crc kubenswrapper[5070]: I1213 03:12:22.278760 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:12:22 crc kubenswrapper[5070]: I1213 03:12:22.278817 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:12:22 crc kubenswrapper[5070]: I1213 03:12:22.278827 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:12:22 crc kubenswrapper[5070]: I1213 03:12:22.278847 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 03:12:22 crc kubenswrapper[5070]: I1213 03:12:22.278858 5070 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T03:12:22Z","lastTransitionTime":"2025-12-13T03:12:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 03:12:22 crc kubenswrapper[5070]: I1213 03:12:22.290906 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-tmwbx"] Dec 13 03:12:22 crc kubenswrapper[5070]: I1213 03:12:22.291728 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-tmwbx" Dec 13 03:12:22 crc kubenswrapper[5070]: E1213 03:12:22.291888 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-tmwbx" podUID="86e29ac2-28cb-457d-8f96-5a60c3d535e3" Dec 13 03:12:22 crc kubenswrapper[5070]: I1213 03:12:22.344296 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/2cb648da-75a7-4412-9e05-7cd577ad96a4-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-nhg2r\" (UID: \"2cb648da-75a7-4412-9e05-7cd577ad96a4\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nhg2r" Dec 13 03:12:22 crc kubenswrapper[5070]: I1213 03:12:22.344365 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/86e29ac2-28cb-457d-8f96-5a60c3d535e3-metrics-certs\") pod \"network-metrics-daemon-tmwbx\" (UID: \"86e29ac2-28cb-457d-8f96-5a60c3d535e3\") " pod="openshift-multus/network-metrics-daemon-tmwbx" Dec 13 03:12:22 crc kubenswrapper[5070]: I1213 03:12:22.344392 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lsgvz\" (UniqueName: \"kubernetes.io/projected/86e29ac2-28cb-457d-8f96-5a60c3d535e3-kube-api-access-lsgvz\") pod \"network-metrics-daemon-tmwbx\" (UID: \"86e29ac2-28cb-457d-8f96-5a60c3d535e3\") " pod="openshift-multus/network-metrics-daemon-tmwbx" Dec 13 03:12:22 crc kubenswrapper[5070]: I1213 03:12:22.344432 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pl4n5\" (UniqueName: \"kubernetes.io/projected/2cb648da-75a7-4412-9e05-7cd577ad96a4-kube-api-access-pl4n5\") pod \"ovnkube-control-plane-749d76644c-nhg2r\" (UID: \"2cb648da-75a7-4412-9e05-7cd577ad96a4\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nhg2r" Dec 13 03:12:22 crc kubenswrapper[5070]: I1213 03:12:22.344505 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/2cb648da-75a7-4412-9e05-7cd577ad96a4-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-nhg2r\" (UID: \"2cb648da-75a7-4412-9e05-7cd577ad96a4\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nhg2r" Dec 13 03:12:22 crc kubenswrapper[5070]: I1213 03:12:22.344529 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/2cb648da-75a7-4412-9e05-7cd577ad96a4-env-overrides\") pod \"ovnkube-control-plane-749d76644c-nhg2r\" (UID: \"2cb648da-75a7-4412-9e05-7cd577ad96a4\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nhg2r" Dec 13 03:12:22 crc kubenswrapper[5070]: I1213 03:12:22.382532 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:12:22 crc kubenswrapper[5070]: I1213 03:12:22.382572 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:12:22 crc kubenswrapper[5070]: I1213 03:12:22.382582 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:12:22 crc kubenswrapper[5070]: I1213 03:12:22.382598 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 03:12:22 crc kubenswrapper[5070]: I1213 03:12:22.382608 5070 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T03:12:22Z","lastTransitionTime":"2025-12-13T03:12:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 03:12:22 crc kubenswrapper[5070]: I1213 03:12:22.387607 5070 generic.go:334] "Generic (PLEG): container finished" podID="65d45c3c-cac7-4578-b0b5-05e546d8e356" containerID="dfd5f39ce1d4e9d62672946aad6fb238ea657dbee23811712271177505bc948f" exitCode=0 Dec 13 03:12:22 crc kubenswrapper[5070]: I1213 03:12:22.387683 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7pmp8" event={"ID":"65d45c3c-cac7-4578-b0b5-05e546d8e356","Type":"ContainerDied","Data":"dfd5f39ce1d4e9d62672946aad6fb238ea657dbee23811712271177505bc948f"} Dec 13 03:12:22 crc kubenswrapper[5070]: I1213 03:12:22.387724 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7pmp8" event={"ID":"65d45c3c-cac7-4578-b0b5-05e546d8e356","Type":"ContainerStarted","Data":"b0410c0d7afa2272cf891cf4ca13aeb146961247c1674a992bb8963207013d18"} Dec 13 03:12:22 crc kubenswrapper[5070]: I1213 03:12:22.391277 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-c4c69" event={"ID":"7ecb3a4a-4966-4cd1-bf07-aec91cf4212e","Type":"ContainerStarted","Data":"dd65753d5cfdc23519bf1aade864ce59e3d4ac4a3a3751c963e4ba0103a418aa"} Dec 13 03:12:22 crc kubenswrapper[5070]: I1213 03:12:22.391573 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-c4c69" event={"ID":"7ecb3a4a-4966-4cd1-bf07-aec91cf4212e","Type":"ContainerStarted","Data":"8a8ce53f6b64fbfcc742bcd29f46da4d6075511b8bdf721f95d101c76734c47f"} Dec 13 03:12:22 crc kubenswrapper[5070]: I1213 03:12:22.394261 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-cvqqv" event={"ID":"8298b7de-21e2-4cb5-b4b4-0f556d87b792","Type":"ContainerStarted","Data":"a7ad00f691dbf75e8712c1c1680855573a0bfcad4416d349435b291c9e40eff1"} Dec 13 03:12:22 crc kubenswrapper[5070]: I1213 03:12:22.394334 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-cvqqv" event={"ID":"8298b7de-21e2-4cb5-b4b4-0f556d87b792","Type":"ContainerStarted","Data":"e2404e033b674e4f46bdd8786b62950608b669ff6ea87b4a4a8bf7c97f2b8858"} Dec 13 03:12:22 crc kubenswrapper[5070]: I1213 03:12:22.396676 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-9l8nl" event={"ID":"49eb1fc8-400f-470b-8664-39fd6c652542","Type":"ContainerStarted","Data":"9e4fbb69f97096fdcc371583f3db6bba72848358d7141ef5f79bef92ab6f169a"} Dec 13 03:12:22 crc kubenswrapper[5070]: I1213 03:12:22.396780 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-9l8nl" event={"ID":"49eb1fc8-400f-470b-8664-39fd6c652542","Type":"ContainerStarted","Data":"a27fd9e61c0f9136e19175e986541167a65e48c859eb61471d58da0f042da762"} Dec 13 03:12:22 crc kubenswrapper[5070]: I1213 03:12:22.398952 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"985516406e64eddeec547dc5ddbf8f33e6f3fd400f4dc3ed852e2d3dc156bf7d"} Dec 13 03:12:22 crc kubenswrapper[5070]: I1213 03:12:22.414699 5070 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-2wg5p" event={"ID":"a79c51b6-42c7-41c3-9f2c-39a9a88e54a4","Type":"ContainerStarted","Data":"dc9f9c50b99bb902d3a7f307d70f3b113bc49c6c2890cb20d968bd7d6bc1ab3b"} Dec 13 03:12:22 crc kubenswrapper[5070]: I1213 03:12:22.414879 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-2wg5p" event={"ID":"a79c51b6-42c7-41c3-9f2c-39a9a88e54a4","Type":"ContainerStarted","Data":"a0f0dd2a9db7b793d9944ebf2ba449d433aab0298918c9af9b19d0a5cfcdae44"} Dec 13 03:12:22 crc kubenswrapper[5070]: I1213 03:12:22.423100 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" event={"ID":"a2e447c7-5901-414f-af96-69441d4750db","Type":"ContainerStarted","Data":"c2d50289c0ed1fca669a559cd930ba8a5956b47dcf5bfcd09135e95641462b37"} Dec 13 03:12:22 crc kubenswrapper[5070]: I1213 03:12:22.423147 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" event={"ID":"a2e447c7-5901-414f-af96-69441d4750db","Type":"ContainerStarted","Data":"63922f5dc6cb463f08e809ea5fc2f1cd1f0d5a154bfaeed55a81f138c2ba391a"} Dec 13 03:12:22 crc kubenswrapper[5070]: I1213 03:12:22.423157 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" event={"ID":"a2e447c7-5901-414f-af96-69441d4750db","Type":"ContainerStarted","Data":"e965e0be8787302bb267663e37a9ad5d394cb62fb82f22c9c086c6909348976b"} Dec 13 03:12:22 crc kubenswrapper[5070]: I1213 03:12:22.445015 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/86e29ac2-28cb-457d-8f96-5a60c3d535e3-metrics-certs\") pod \"network-metrics-daemon-tmwbx\" (UID: \"86e29ac2-28cb-457d-8f96-5a60c3d535e3\") " pod="openshift-multus/network-metrics-daemon-tmwbx" Dec 13 03:12:22 crc kubenswrapper[5070]: I1213 03:12:22.445284 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lsgvz\" (UniqueName: \"kubernetes.io/projected/86e29ac2-28cb-457d-8f96-5a60c3d535e3-kube-api-access-lsgvz\") pod \"network-metrics-daemon-tmwbx\" (UID: \"86e29ac2-28cb-457d-8f96-5a60c3d535e3\") " pod="openshift-multus/network-metrics-daemon-tmwbx" Dec 13 03:12:22 crc kubenswrapper[5070]: I1213 03:12:22.445400 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pl4n5\" (UniqueName: \"kubernetes.io/projected/2cb648da-75a7-4412-9e05-7cd577ad96a4-kube-api-access-pl4n5\") pod \"ovnkube-control-plane-749d76644c-nhg2r\" (UID: \"2cb648da-75a7-4412-9e05-7cd577ad96a4\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nhg2r" Dec 13 03:12:22 crc kubenswrapper[5070]: E1213 03:12:22.445557 5070 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 13 03:12:22 crc kubenswrapper[5070]: E1213 03:12:22.445637 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/86e29ac2-28cb-457d-8f96-5a60c3d535e3-metrics-certs podName:86e29ac2-28cb-457d-8f96-5a60c3d535e3 nodeName:}" failed. No retries permitted until 2025-12-13 03:12:22.945615024 +0000 UTC m=+35.181458570 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/86e29ac2-28cb-457d-8f96-5a60c3d535e3-metrics-certs") pod "network-metrics-daemon-tmwbx" (UID: "86e29ac2-28cb-457d-8f96-5a60c3d535e3") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 13 03:12:22 crc kubenswrapper[5070]: I1213 03:12:22.445727 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/2cb648da-75a7-4412-9e05-7cd577ad96a4-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-nhg2r\" (UID: \"2cb648da-75a7-4412-9e05-7cd577ad96a4\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nhg2r" Dec 13 03:12:22 crc kubenswrapper[5070]: I1213 03:12:22.445856 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/2cb648da-75a7-4412-9e05-7cd577ad96a4-env-overrides\") pod \"ovnkube-control-plane-749d76644c-nhg2r\" (UID: \"2cb648da-75a7-4412-9e05-7cd577ad96a4\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nhg2r" Dec 13 03:12:22 crc kubenswrapper[5070]: I1213 03:12:22.445966 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/2cb648da-75a7-4412-9e05-7cd577ad96a4-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-nhg2r\" (UID: \"2cb648da-75a7-4412-9e05-7cd577ad96a4\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nhg2r" Dec 13 03:12:22 crc kubenswrapper[5070]: I1213 03:12:22.459668 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/2cb648da-75a7-4412-9e05-7cd577ad96a4-env-overrides\") pod \"ovnkube-control-plane-749d76644c-nhg2r\" (UID: \"2cb648da-75a7-4412-9e05-7cd577ad96a4\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nhg2r" Dec 13 03:12:22 crc kubenswrapper[5070]: I1213 03:12:22.463030 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/2cb648da-75a7-4412-9e05-7cd577ad96a4-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-nhg2r\" (UID: \"2cb648da-75a7-4412-9e05-7cd577ad96a4\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nhg2r" Dec 13 03:12:22 crc kubenswrapper[5070]: I1213 03:12:22.463917 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/2cb648da-75a7-4412-9e05-7cd577ad96a4-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-nhg2r\" (UID: \"2cb648da-75a7-4412-9e05-7cd577ad96a4\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nhg2r" Dec 13 03:12:22 crc kubenswrapper[5070]: I1213 03:12:22.470181 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lsgvz\" (UniqueName: \"kubernetes.io/projected/86e29ac2-28cb-457d-8f96-5a60c3d535e3-kube-api-access-lsgvz\") pod \"network-metrics-daemon-tmwbx\" (UID: \"86e29ac2-28cb-457d-8f96-5a60c3d535e3\") " pod="openshift-multus/network-metrics-daemon-tmwbx" Dec 13 03:12:22 crc kubenswrapper[5070]: I1213 03:12:22.476048 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pl4n5\" (UniqueName: \"kubernetes.io/projected/2cb648da-75a7-4412-9e05-7cd577ad96a4-kube-api-access-pl4n5\") pod 
\"ovnkube-control-plane-749d76644c-nhg2r\" (UID: \"2cb648da-75a7-4412-9e05-7cd577ad96a4\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nhg2r" Dec 13 03:12:22 crc kubenswrapper[5070]: I1213 03:12:22.477108 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-9l8nl" podStartSLOduration=1.477094435 podStartE2EDuration="1.477094435s" podCreationTimestamp="2025-12-13 03:12:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 03:12:22.443077077 +0000 UTC m=+34.678920643" watchObservedRunningTime="2025-12-13 03:12:22.477094435 +0000 UTC m=+34.712937991" Dec 13 03:12:22 crc kubenswrapper[5070]: I1213 03:12:22.477572 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-cvqqv" podStartSLOduration=1.4775652369999999 podStartE2EDuration="1.477565237s" podCreationTimestamp="2025-12-13 03:12:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 03:12:22.476627013 +0000 UTC m=+34.712470569" watchObservedRunningTime="2025-12-13 03:12:22.477565237 +0000 UTC m=+34.713408783" Dec 13 03:12:22 crc kubenswrapper[5070]: I1213 03:12:22.485359 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:12:22 crc kubenswrapper[5070]: I1213 03:12:22.485607 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:12:22 crc kubenswrapper[5070]: I1213 03:12:22.485686 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:12:22 crc kubenswrapper[5070]: I1213 03:12:22.485766 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 03:12:22 crc kubenswrapper[5070]: I1213 03:12:22.485859 5070 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T03:12:22Z","lastTransitionTime":"2025-12-13T03:12:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 03:12:22 crc kubenswrapper[5070]: I1213 03:12:22.511892 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-c4c69" podStartSLOduration=1.511872254 podStartE2EDuration="1.511872254s" podCreationTimestamp="2025-12-13 03:12:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 03:12:22.4939576 +0000 UTC m=+34.729801146" watchObservedRunningTime="2025-12-13 03:12:22.511872254 +0000 UTC m=+34.747715790" Dec 13 03:12:22 crc kubenswrapper[5070]: I1213 03:12:22.525600 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podStartSLOduration=1.5255825650000001 podStartE2EDuration="1.525582565s" podCreationTimestamp="2025-12-13 03:12:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 03:12:22.524908037 +0000 UTC m=+34.760751583" watchObservedRunningTime="2025-12-13 03:12:22.525582565 +0000 UTC m=+34.761426101" Dec 13 03:12:22 crc kubenswrapper[5070]: I1213 03:12:22.562534 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nhg2r" Dec 13 03:12:22 crc kubenswrapper[5070]: I1213 03:12:22.588677 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:12:22 crc kubenswrapper[5070]: I1213 03:12:22.588718 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:12:22 crc kubenswrapper[5070]: I1213 03:12:22.588728 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:12:22 crc kubenswrapper[5070]: I1213 03:12:22.588742 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 03:12:22 crc kubenswrapper[5070]: I1213 03:12:22.588751 5070 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T03:12:22Z","lastTransitionTime":"2025-12-13T03:12:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 03:12:22 crc kubenswrapper[5070]: W1213 03:12:22.596283 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2cb648da_75a7_4412_9e05_7cd577ad96a4.slice/crio-e45c8e35c55e98bd1541780d2922061492c15fa6c202b148d1ca7846cd8ad9bb WatchSource:0}: Error finding container e45c8e35c55e98bd1541780d2922061492c15fa6c202b148d1ca7846cd8ad9bb: Status 404 returned error can't find the container with id e45c8e35c55e98bd1541780d2922061492c15fa6c202b148d1ca7846cd8ad9bb Dec 13 03:12:22 crc kubenswrapper[5070]: I1213 03:12:22.692252 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:12:22 crc kubenswrapper[5070]: I1213 03:12:22.692559 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:12:22 crc kubenswrapper[5070]: I1213 03:12:22.692568 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:12:22 crc kubenswrapper[5070]: I1213 03:12:22.692583 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 03:12:22 crc kubenswrapper[5070]: I1213 03:12:22.692592 5070 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T03:12:22Z","lastTransitionTime":"2025-12-13T03:12:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 03:12:22 crc kubenswrapper[5070]: I1213 03:12:22.794728 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:12:22 crc kubenswrapper[5070]: I1213 03:12:22.794762 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:12:22 crc kubenswrapper[5070]: I1213 03:12:22.794771 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:12:22 crc kubenswrapper[5070]: I1213 03:12:22.794786 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 03:12:22 crc kubenswrapper[5070]: I1213 03:12:22.794795 5070 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T03:12:22Z","lastTransitionTime":"2025-12-13T03:12:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 03:12:22 crc kubenswrapper[5070]: I1213 03:12:22.897273 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:12:22 crc kubenswrapper[5070]: I1213 03:12:22.897301 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:12:22 crc kubenswrapper[5070]: I1213 03:12:22.897309 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:12:22 crc kubenswrapper[5070]: I1213 03:12:22.897321 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 03:12:22 crc kubenswrapper[5070]: I1213 03:12:22.897330 5070 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T03:12:22Z","lastTransitionTime":"2025-12-13T03:12:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 03:12:22 crc kubenswrapper[5070]: I1213 03:12:22.950952 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 03:12:22 crc kubenswrapper[5070]: I1213 03:12:22.951094 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 03:12:22 crc kubenswrapper[5070]: I1213 03:12:22.951117 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 03:12:22 crc kubenswrapper[5070]: I1213 03:12:22.951141 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 03:12:22 crc kubenswrapper[5070]: I1213 03:12:22.951168 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/86e29ac2-28cb-457d-8f96-5a60c3d535e3-metrics-certs\") pod \"network-metrics-daemon-tmwbx\" (UID: \"86e29ac2-28cb-457d-8f96-5a60c3d535e3\") " pod="openshift-multus/network-metrics-daemon-tmwbx" Dec 13 03:12:22 crc kubenswrapper[5070]: I1213 03:12:22.951186 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: 
\"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 03:12:22 crc kubenswrapper[5070]: E1213 03:12:22.951291 5070 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 13 03:12:22 crc kubenswrapper[5070]: E1213 03:12:22.951308 5070 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 13 03:12:22 crc kubenswrapper[5070]: E1213 03:12:22.951318 5070 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 13 03:12:22 crc kubenswrapper[5070]: E1213 03:12:22.951363 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-13 03:12:26.951348264 +0000 UTC m=+39.187191810 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 13 03:12:22 crc kubenswrapper[5070]: E1213 03:12:22.951687 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 03:12:26.951675114 +0000 UTC m=+39.187518660 (durationBeforeRetry 4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 03:12:22 crc kubenswrapper[5070]: E1213 03:12:22.951727 5070 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 13 03:12:22 crc kubenswrapper[5070]: E1213 03:12:22.951749 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-13 03:12:26.951743655 +0000 UTC m=+39.187587201 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 13 03:12:22 crc kubenswrapper[5070]: E1213 03:12:22.951787 5070 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 13 03:12:22 crc kubenswrapper[5070]: E1213 03:12:22.951808 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-13 03:12:26.951802587 +0000 UTC m=+39.187646133 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 13 03:12:22 crc kubenswrapper[5070]: E1213 03:12:22.951846 5070 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 13 03:12:22 crc kubenswrapper[5070]: E1213 03:12:22.951857 5070 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 13 03:12:22 crc kubenswrapper[5070]: E1213 03:12:22.951866 5070 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 13 03:12:22 crc kubenswrapper[5070]: E1213 03:12:22.951884 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-13 03:12:26.951878939 +0000 UTC m=+39.187722485 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 13 03:12:22 crc kubenswrapper[5070]: E1213 03:12:22.951917 5070 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 13 03:12:22 crc kubenswrapper[5070]: E1213 03:12:22.951933 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/86e29ac2-28cb-457d-8f96-5a60c3d535e3-metrics-certs podName:86e29ac2-28cb-457d-8f96-5a60c3d535e3 nodeName:}" failed. No retries permitted until 2025-12-13 03:12:23.95192829 +0000 UTC m=+36.187771836 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/86e29ac2-28cb-457d-8f96-5a60c3d535e3-metrics-certs") pod "network-metrics-daemon-tmwbx" (UID: "86e29ac2-28cb-457d-8f96-5a60c3d535e3") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 13 03:12:23 crc kubenswrapper[5070]: I1213 03:12:23.000156 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:12:23 crc kubenswrapper[5070]: I1213 03:12:23.000188 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:12:23 crc kubenswrapper[5070]: I1213 03:12:23.000197 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:12:23 crc kubenswrapper[5070]: I1213 03:12:23.000209 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 03:12:23 crc kubenswrapper[5070]: I1213 03:12:23.000218 5070 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T03:12:23Z","lastTransitionTime":"2025-12-13T03:12:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 03:12:23 crc kubenswrapper[5070]: I1213 03:12:23.102871 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:12:23 crc kubenswrapper[5070]: I1213 03:12:23.102933 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:12:23 crc kubenswrapper[5070]: I1213 03:12:23.102943 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:12:23 crc kubenswrapper[5070]: I1213 03:12:23.102958 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 03:12:23 crc kubenswrapper[5070]: I1213 03:12:23.102967 5070 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T03:12:23Z","lastTransitionTime":"2025-12-13T03:12:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 03:12:23 crc kubenswrapper[5070]: I1213 03:12:23.166096 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 03:12:23 crc kubenswrapper[5070]: I1213 03:12:23.166132 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 03:12:23 crc kubenswrapper[5070]: I1213 03:12:23.166141 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 03:12:23 crc kubenswrapper[5070]: E1213 03:12:23.166238 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 13 03:12:23 crc kubenswrapper[5070]: E1213 03:12:23.166342 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 13 03:12:23 crc kubenswrapper[5070]: E1213 03:12:23.166469 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 13 03:12:23 crc kubenswrapper[5070]: I1213 03:12:23.205484 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:12:23 crc kubenswrapper[5070]: I1213 03:12:23.205907 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:12:23 crc kubenswrapper[5070]: I1213 03:12:23.205924 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:12:23 crc kubenswrapper[5070]: I1213 03:12:23.205945 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 03:12:23 crc kubenswrapper[5070]: I1213 03:12:23.205959 5070 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T03:12:23Z","lastTransitionTime":"2025-12-13T03:12:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 03:12:23 crc kubenswrapper[5070]: I1213 03:12:23.308249 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:12:23 crc kubenswrapper[5070]: I1213 03:12:23.308281 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:12:23 crc kubenswrapper[5070]: I1213 03:12:23.308295 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:12:23 crc kubenswrapper[5070]: I1213 03:12:23.308313 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 03:12:23 crc kubenswrapper[5070]: I1213 03:12:23.308326 5070 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T03:12:23Z","lastTransitionTime":"2025-12-13T03:12:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 03:12:23 crc kubenswrapper[5070]: I1213 03:12:23.411640 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:12:23 crc kubenswrapper[5070]: I1213 03:12:23.411690 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:12:23 crc kubenswrapper[5070]: I1213 03:12:23.411702 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:12:23 crc kubenswrapper[5070]: I1213 03:12:23.411719 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 03:12:23 crc kubenswrapper[5070]: I1213 03:12:23.411731 5070 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T03:12:23Z","lastTransitionTime":"2025-12-13T03:12:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 03:12:23 crc kubenswrapper[5070]: I1213 03:12:23.428399 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7pmp8" event={"ID":"65d45c3c-cac7-4578-b0b5-05e546d8e356","Type":"ContainerStarted","Data":"941e50e8345cec7c54b5ea9522c2d9e024167e081dc86bc28f5d6ff032614c5f"} Dec 13 03:12:23 crc kubenswrapper[5070]: I1213 03:12:23.428465 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7pmp8" event={"ID":"65d45c3c-cac7-4578-b0b5-05e546d8e356","Type":"ContainerStarted","Data":"d5d76fb1d00b41d6c27f84434b50a1211c98aa213bba1cb7988b2c35d33f07f3"} Dec 13 03:12:23 crc kubenswrapper[5070]: I1213 03:12:23.430561 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nhg2r" event={"ID":"2cb648da-75a7-4412-9e05-7cd577ad96a4","Type":"ContainerStarted","Data":"ed1d955261912ac81e5f00a2e639774b256b86692cc833718f247932d26063d0"} Dec 13 03:12:23 crc kubenswrapper[5070]: I1213 03:12:23.430586 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nhg2r" event={"ID":"2cb648da-75a7-4412-9e05-7cd577ad96a4","Type":"ContainerStarted","Data":"e45c8e35c55e98bd1541780d2922061492c15fa6c202b148d1ca7846cd8ad9bb"} Dec 13 03:12:23 crc kubenswrapper[5070]: I1213 03:12:23.432994 5070 generic.go:334] "Generic (PLEG): container finished" podID="a79c51b6-42c7-41c3-9f2c-39a9a88e54a4" containerID="dc9f9c50b99bb902d3a7f307d70f3b113bc49c6c2890cb20d968bd7d6bc1ab3b" exitCode=0 Dec 13 03:12:23 crc kubenswrapper[5070]: I1213 03:12:23.433113 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-2wg5p" event={"ID":"a79c51b6-42c7-41c3-9f2c-39a9a88e54a4","Type":"ContainerDied","Data":"dc9f9c50b99bb902d3a7f307d70f3b113bc49c6c2890cb20d968bd7d6bc1ab3b"} Dec 13 03:12:23 crc kubenswrapper[5070]: I1213 03:12:23.517685 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:12:23 crc kubenswrapper[5070]: I1213 03:12:23.517733 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:12:23 crc kubenswrapper[5070]: I1213 03:12:23.517745 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:12:23 crc kubenswrapper[5070]: I1213 03:12:23.517769 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 03:12:23 crc kubenswrapper[5070]: I1213 03:12:23.517786 5070 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T03:12:23Z","lastTransitionTime":"2025-12-13T03:12:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 03:12:23 crc kubenswrapper[5070]: I1213 03:12:23.630969 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:12:23 crc kubenswrapper[5070]: I1213 03:12:23.631398 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:12:23 crc kubenswrapper[5070]: I1213 03:12:23.631411 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:12:23 crc kubenswrapper[5070]: I1213 03:12:23.631468 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 03:12:23 crc kubenswrapper[5070]: I1213 03:12:23.631482 5070 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T03:12:23Z","lastTransitionTime":"2025-12-13T03:12:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 03:12:23 crc kubenswrapper[5070]: I1213 03:12:23.733881 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:12:23 crc kubenswrapper[5070]: I1213 03:12:23.733927 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:12:23 crc kubenswrapper[5070]: I1213 03:12:23.733946 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:12:23 crc kubenswrapper[5070]: I1213 03:12:23.734014 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 03:12:23 crc kubenswrapper[5070]: I1213 03:12:23.734029 5070 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T03:12:23Z","lastTransitionTime":"2025-12-13T03:12:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 03:12:23 crc kubenswrapper[5070]: I1213 03:12:23.836897 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:12:23 crc kubenswrapper[5070]: I1213 03:12:23.836947 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:12:23 crc kubenswrapper[5070]: I1213 03:12:23.836961 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:12:23 crc kubenswrapper[5070]: I1213 03:12:23.836981 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 03:12:23 crc kubenswrapper[5070]: I1213 03:12:23.836996 5070 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T03:12:23Z","lastTransitionTime":"2025-12-13T03:12:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 03:12:23 crc kubenswrapper[5070]: I1213 03:12:23.940871 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:12:23 crc kubenswrapper[5070]: I1213 03:12:23.940911 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:12:23 crc kubenswrapper[5070]: I1213 03:12:23.940924 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:12:23 crc kubenswrapper[5070]: I1213 03:12:23.940940 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 03:12:23 crc kubenswrapper[5070]: I1213 03:12:23.940950 5070 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T03:12:23Z","lastTransitionTime":"2025-12-13T03:12:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 03:12:23 crc kubenswrapper[5070]: I1213 03:12:23.967787 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/86e29ac2-28cb-457d-8f96-5a60c3d535e3-metrics-certs\") pod \"network-metrics-daemon-tmwbx\" (UID: \"86e29ac2-28cb-457d-8f96-5a60c3d535e3\") " pod="openshift-multus/network-metrics-daemon-tmwbx" Dec 13 03:12:23 crc kubenswrapper[5070]: E1213 03:12:23.967890 5070 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 13 03:12:23 crc kubenswrapper[5070]: E1213 03:12:23.967948 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/86e29ac2-28cb-457d-8f96-5a60c3d535e3-metrics-certs podName:86e29ac2-28cb-457d-8f96-5a60c3d535e3 nodeName:}" failed. No retries permitted until 2025-12-13 03:12:25.9679344 +0000 UTC m=+38.203777946 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/86e29ac2-28cb-457d-8f96-5a60c3d535e3-metrics-certs") pod "network-metrics-daemon-tmwbx" (UID: "86e29ac2-28cb-457d-8f96-5a60c3d535e3") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 13 03:12:24 crc kubenswrapper[5070]: I1213 03:12:24.043570 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:12:24 crc kubenswrapper[5070]: I1213 03:12:24.044086 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:12:24 crc kubenswrapper[5070]: I1213 03:12:24.044105 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:12:24 crc kubenswrapper[5070]: I1213 03:12:24.044123 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 03:12:24 crc kubenswrapper[5070]: I1213 03:12:24.044136 5070 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T03:12:24Z","lastTransitionTime":"2025-12-13T03:12:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 03:12:24 crc kubenswrapper[5070]: I1213 03:12:24.146191 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:12:24 crc kubenswrapper[5070]: I1213 03:12:24.146225 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:12:24 crc kubenswrapper[5070]: I1213 03:12:24.146233 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:12:24 crc kubenswrapper[5070]: I1213 03:12:24.146249 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 03:12:24 crc kubenswrapper[5070]: I1213 03:12:24.146260 5070 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T03:12:24Z","lastTransitionTime":"2025-12-13T03:12:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 03:12:24 crc kubenswrapper[5070]: I1213 03:12:24.166685 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-tmwbx" Dec 13 03:12:24 crc kubenswrapper[5070]: E1213 03:12:24.166834 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-tmwbx" podUID="86e29ac2-28cb-457d-8f96-5a60c3d535e3" Dec 13 03:12:24 crc kubenswrapper[5070]: I1213 03:12:24.249037 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:12:24 crc kubenswrapper[5070]: I1213 03:12:24.249077 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:12:24 crc kubenswrapper[5070]: I1213 03:12:24.249088 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:12:24 crc kubenswrapper[5070]: I1213 03:12:24.249103 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 03:12:24 crc kubenswrapper[5070]: I1213 03:12:24.249113 5070 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T03:12:24Z","lastTransitionTime":"2025-12-13T03:12:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 03:12:24 crc kubenswrapper[5070]: I1213 03:12:24.352075 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:12:24 crc kubenswrapper[5070]: I1213 03:12:24.352162 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:12:24 crc kubenswrapper[5070]: I1213 03:12:24.352199 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:12:24 crc kubenswrapper[5070]: I1213 03:12:24.352232 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 03:12:24 crc kubenswrapper[5070]: I1213 03:12:24.352279 5070 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T03:12:24Z","lastTransitionTime":"2025-12-13T03:12:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 03:12:24 crc kubenswrapper[5070]: I1213 03:12:24.438973 5070 generic.go:334] "Generic (PLEG): container finished" podID="a79c51b6-42c7-41c3-9f2c-39a9a88e54a4" containerID="c3f07c32b10860773108caf6c204ef31ae9a92a16ca9c6923504c75cad6f2ea7" exitCode=0 Dec 13 03:12:24 crc kubenswrapper[5070]: I1213 03:12:24.439102 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-2wg5p" event={"ID":"a79c51b6-42c7-41c3-9f2c-39a9a88e54a4","Type":"ContainerDied","Data":"c3f07c32b10860773108caf6c204ef31ae9a92a16ca9c6923504c75cad6f2ea7"} Dec 13 03:12:24 crc kubenswrapper[5070]: I1213 03:12:24.445574 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7pmp8" event={"ID":"65d45c3c-cac7-4578-b0b5-05e546d8e356","Type":"ContainerStarted","Data":"4455aebd8013b0ae551a376b4bfc7745c2744abf709223a6d9a975aa63b8da9c"} Dec 13 03:12:24 crc kubenswrapper[5070]: I1213 03:12:24.445625 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7pmp8" event={"ID":"65d45c3c-cac7-4578-b0b5-05e546d8e356","Type":"ContainerStarted","Data":"16558a9632bfa96e34363fb4b5c94c8fdd5c08e0d462bffe09a7147d0212a7e4"} Dec 13 03:12:24 crc kubenswrapper[5070]: I1213 03:12:24.445638 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7pmp8" event={"ID":"65d45c3c-cac7-4578-b0b5-05e546d8e356","Type":"ContainerStarted","Data":"6f7f21f84fa1339e71e86a8fa1512888749a1ecf63fb88ac90242f55cbad2b58"} Dec 13 03:12:24 crc kubenswrapper[5070]: I1213 03:12:24.445649 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7pmp8" event={"ID":"65d45c3c-cac7-4578-b0b5-05e546d8e356","Type":"ContainerStarted","Data":"2765090ae9ec41771df9f957049ea3af811f7a0ab5c2a5bce66e198101d8a15c"} Dec 13 03:12:24 crc kubenswrapper[5070]: I1213 03:12:24.447838 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nhg2r" event={"ID":"2cb648da-75a7-4412-9e05-7cd577ad96a4","Type":"ContainerStarted","Data":"a8ea43383a6c5325a9345371604929972bc26a09ad1fedaab1d24e2d2ed5c228"} Dec 13 03:12:24 crc kubenswrapper[5070]: I1213 03:12:24.456043 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:12:24 crc kubenswrapper[5070]: I1213 03:12:24.456094 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:12:24 crc kubenswrapper[5070]: I1213 03:12:24.456113 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:12:24 crc kubenswrapper[5070]: I1213 03:12:24.456136 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 03:12:24 crc kubenswrapper[5070]: I1213 03:12:24.456155 5070 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T03:12:24Z","lastTransitionTime":"2025-12-13T03:12:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 03:12:24 crc kubenswrapper[5070]: I1213 03:12:24.558612 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:12:24 crc kubenswrapper[5070]: I1213 03:12:24.558643 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:12:24 crc kubenswrapper[5070]: I1213 03:12:24.558653 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:12:24 crc kubenswrapper[5070]: I1213 03:12:24.558666 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 03:12:24 crc kubenswrapper[5070]: I1213 03:12:24.558676 5070 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T03:12:24Z","lastTransitionTime":"2025-12-13T03:12:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 03:12:24 crc kubenswrapper[5070]: I1213 03:12:24.661653 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:12:24 crc kubenswrapper[5070]: I1213 03:12:24.661698 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:12:24 crc kubenswrapper[5070]: I1213 03:12:24.661712 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:12:24 crc kubenswrapper[5070]: I1213 03:12:24.661733 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 03:12:24 crc kubenswrapper[5070]: I1213 03:12:24.661758 5070 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T03:12:24Z","lastTransitionTime":"2025-12-13T03:12:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 03:12:24 crc kubenswrapper[5070]: I1213 03:12:24.764329 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:12:24 crc kubenswrapper[5070]: I1213 03:12:24.764657 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:12:24 crc kubenswrapper[5070]: I1213 03:12:24.764665 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:12:24 crc kubenswrapper[5070]: I1213 03:12:24.764679 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 03:12:24 crc kubenswrapper[5070]: I1213 03:12:24.764687 5070 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T03:12:24Z","lastTransitionTime":"2025-12-13T03:12:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 03:12:24 crc kubenswrapper[5070]: I1213 03:12:24.867358 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:12:24 crc kubenswrapper[5070]: I1213 03:12:24.867397 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:12:24 crc kubenswrapper[5070]: I1213 03:12:24.867406 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:12:24 crc kubenswrapper[5070]: I1213 03:12:24.867418 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 03:12:24 crc kubenswrapper[5070]: I1213 03:12:24.867427 5070 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T03:12:24Z","lastTransitionTime":"2025-12-13T03:12:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 03:12:24 crc kubenswrapper[5070]: I1213 03:12:24.970157 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:12:24 crc kubenswrapper[5070]: I1213 03:12:24.970203 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:12:24 crc kubenswrapper[5070]: I1213 03:12:24.970216 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:12:24 crc kubenswrapper[5070]: I1213 03:12:24.970237 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 03:12:24 crc kubenswrapper[5070]: I1213 03:12:24.970250 5070 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T03:12:24Z","lastTransitionTime":"2025-12-13T03:12:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 03:12:25 crc kubenswrapper[5070]: I1213 03:12:25.072243 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:12:25 crc kubenswrapper[5070]: I1213 03:12:25.072288 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:12:25 crc kubenswrapper[5070]: I1213 03:12:25.072299 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:12:25 crc kubenswrapper[5070]: I1213 03:12:25.072316 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 03:12:25 crc kubenswrapper[5070]: I1213 03:12:25.072328 5070 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T03:12:25Z","lastTransitionTime":"2025-12-13T03:12:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 03:12:25 crc kubenswrapper[5070]: I1213 03:12:25.166270 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 03:12:25 crc kubenswrapper[5070]: I1213 03:12:25.166345 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 03:12:25 crc kubenswrapper[5070]: E1213 03:12:25.166393 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 13 03:12:25 crc kubenswrapper[5070]: I1213 03:12:25.166274 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 03:12:25 crc kubenswrapper[5070]: E1213 03:12:25.166566 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 13 03:12:25 crc kubenswrapper[5070]: E1213 03:12:25.166686 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 13 03:12:25 crc kubenswrapper[5070]: I1213 03:12:25.175274 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:12:25 crc kubenswrapper[5070]: I1213 03:12:25.175339 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:12:25 crc kubenswrapper[5070]: I1213 03:12:25.175362 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:12:25 crc kubenswrapper[5070]: I1213 03:12:25.175389 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 03:12:25 crc kubenswrapper[5070]: I1213 03:12:25.175410 5070 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T03:12:25Z","lastTransitionTime":"2025-12-13T03:12:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 03:12:25 crc kubenswrapper[5070]: I1213 03:12:25.278834 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:12:25 crc kubenswrapper[5070]: I1213 03:12:25.279075 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:12:25 crc kubenswrapper[5070]: I1213 03:12:25.279087 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:12:25 crc kubenswrapper[5070]: I1213 03:12:25.279104 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 03:12:25 crc kubenswrapper[5070]: I1213 03:12:25.279115 5070 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T03:12:25Z","lastTransitionTime":"2025-12-13T03:12:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 03:12:25 crc kubenswrapper[5070]: I1213 03:12:25.381345 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:12:25 crc kubenswrapper[5070]: I1213 03:12:25.381391 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:12:25 crc kubenswrapper[5070]: I1213 03:12:25.381403 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:12:25 crc kubenswrapper[5070]: I1213 03:12:25.381421 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 03:12:25 crc kubenswrapper[5070]: I1213 03:12:25.381433 5070 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T03:12:25Z","lastTransitionTime":"2025-12-13T03:12:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 03:12:25 crc kubenswrapper[5070]: I1213 03:12:25.455486 5070 generic.go:334] "Generic (PLEG): container finished" podID="a79c51b6-42c7-41c3-9f2c-39a9a88e54a4" containerID="1eac7fcd563ada26ef152520dbc5bf11d1847d30a07865925b985509499642b8" exitCode=0 Dec 13 03:12:25 crc kubenswrapper[5070]: I1213 03:12:25.455606 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-2wg5p" event={"ID":"a79c51b6-42c7-41c3-9f2c-39a9a88e54a4","Type":"ContainerDied","Data":"1eac7fcd563ada26ef152520dbc5bf11d1847d30a07865925b985509499642b8"} Dec 13 03:12:25 crc kubenswrapper[5070]: I1213 03:12:25.479951 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nhg2r" podStartSLOduration=4.479922634 podStartE2EDuration="4.479922634s" podCreationTimestamp="2025-12-13 03:12:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 03:12:24.497393907 +0000 UTC m=+36.733237463" watchObservedRunningTime="2025-12-13 03:12:25.479922634 +0000 UTC m=+37.715766220" Dec 13 03:12:25 crc kubenswrapper[5070]: I1213 03:12:25.483824 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:12:25 crc kubenswrapper[5070]: I1213 03:12:25.483855 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:12:25 crc kubenswrapper[5070]: I1213 03:12:25.483865 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:12:25 crc kubenswrapper[5070]: I1213 03:12:25.483880 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 03:12:25 crc kubenswrapper[5070]: I1213 03:12:25.483890 5070 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T03:12:25Z","lastTransitionTime":"2025-12-13T03:12:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 03:12:25 crc kubenswrapper[5070]: I1213 03:12:25.586014 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:12:25 crc kubenswrapper[5070]: I1213 03:12:25.586062 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:12:25 crc kubenswrapper[5070]: I1213 03:12:25.586074 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:12:25 crc kubenswrapper[5070]: I1213 03:12:25.586094 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 03:12:25 crc kubenswrapper[5070]: I1213 03:12:25.586108 5070 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T03:12:25Z","lastTransitionTime":"2025-12-13T03:12:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 03:12:25 crc kubenswrapper[5070]: I1213 03:12:25.690847 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:12:25 crc kubenswrapper[5070]: I1213 03:12:25.690880 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:12:25 crc kubenswrapper[5070]: I1213 03:12:25.690890 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:12:25 crc kubenswrapper[5070]: I1213 03:12:25.690904 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 03:12:25 crc kubenswrapper[5070]: I1213 03:12:25.690916 5070 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T03:12:25Z","lastTransitionTime":"2025-12-13T03:12:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 03:12:25 crc kubenswrapper[5070]: I1213 03:12:25.793106 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:12:25 crc kubenswrapper[5070]: I1213 03:12:25.793147 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:12:25 crc kubenswrapper[5070]: I1213 03:12:25.793156 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:12:25 crc kubenswrapper[5070]: I1213 03:12:25.793171 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 03:12:25 crc kubenswrapper[5070]: I1213 03:12:25.793179 5070 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T03:12:25Z","lastTransitionTime":"2025-12-13T03:12:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 03:12:25 crc kubenswrapper[5070]: I1213 03:12:25.895875 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:12:25 crc kubenswrapper[5070]: I1213 03:12:25.895918 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:12:25 crc kubenswrapper[5070]: I1213 03:12:25.895929 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:12:25 crc kubenswrapper[5070]: I1213 03:12:25.895949 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 03:12:25 crc kubenswrapper[5070]: I1213 03:12:25.895960 5070 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T03:12:25Z","lastTransitionTime":"2025-12-13T03:12:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 03:12:25 crc kubenswrapper[5070]: I1213 03:12:25.987210 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/86e29ac2-28cb-457d-8f96-5a60c3d535e3-metrics-certs\") pod \"network-metrics-daemon-tmwbx\" (UID: \"86e29ac2-28cb-457d-8f96-5a60c3d535e3\") " pod="openshift-multus/network-metrics-daemon-tmwbx" Dec 13 03:12:25 crc kubenswrapper[5070]: E1213 03:12:25.987358 5070 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 13 03:12:25 crc kubenswrapper[5070]: E1213 03:12:25.987418 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/86e29ac2-28cb-457d-8f96-5a60c3d535e3-metrics-certs podName:86e29ac2-28cb-457d-8f96-5a60c3d535e3 nodeName:}" failed. No retries permitted until 2025-12-13 03:12:29.98740214 +0000 UTC m=+42.223245676 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/86e29ac2-28cb-457d-8f96-5a60c3d535e3-metrics-certs") pod "network-metrics-daemon-tmwbx" (UID: "86e29ac2-28cb-457d-8f96-5a60c3d535e3") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 13 03:12:25 crc kubenswrapper[5070]: I1213 03:12:25.998194 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:12:25 crc kubenswrapper[5070]: I1213 03:12:25.998230 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:12:25 crc kubenswrapper[5070]: I1213 03:12:25.998239 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:12:25 crc kubenswrapper[5070]: I1213 03:12:25.998253 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 03:12:25 crc kubenswrapper[5070]: I1213 03:12:25.998263 5070 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T03:12:25Z","lastTransitionTime":"2025-12-13T03:12:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 03:12:26 crc kubenswrapper[5070]: I1213 03:12:26.101848 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:12:26 crc kubenswrapper[5070]: I1213 03:12:26.102006 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:12:26 crc kubenswrapper[5070]: I1213 03:12:26.102025 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:12:26 crc kubenswrapper[5070]: I1213 03:12:26.102050 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 03:12:26 crc kubenswrapper[5070]: I1213 03:12:26.102068 5070 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T03:12:26Z","lastTransitionTime":"2025-12-13T03:12:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 03:12:26 crc kubenswrapper[5070]: I1213 03:12:26.166376 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-tmwbx" Dec 13 03:12:26 crc kubenswrapper[5070]: E1213 03:12:26.166523 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-tmwbx" podUID="86e29ac2-28cb-457d-8f96-5a60c3d535e3" Dec 13 03:12:26 crc kubenswrapper[5070]: I1213 03:12:26.203966 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:12:26 crc kubenswrapper[5070]: I1213 03:12:26.203995 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:12:26 crc kubenswrapper[5070]: I1213 03:12:26.204003 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:12:26 crc kubenswrapper[5070]: I1213 03:12:26.204016 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 03:12:26 crc kubenswrapper[5070]: I1213 03:12:26.204027 5070 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T03:12:26Z","lastTransitionTime":"2025-12-13T03:12:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 03:12:26 crc kubenswrapper[5070]: I1213 03:12:26.307426 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:12:26 crc kubenswrapper[5070]: I1213 03:12:26.307522 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:12:26 crc kubenswrapper[5070]: I1213 03:12:26.307540 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:12:26 crc kubenswrapper[5070]: I1213 03:12:26.307565 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 03:12:26 crc kubenswrapper[5070]: I1213 03:12:26.307582 5070 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T03:12:26Z","lastTransitionTime":"2025-12-13T03:12:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 03:12:26 crc kubenswrapper[5070]: I1213 03:12:26.409839 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:12:26 crc kubenswrapper[5070]: I1213 03:12:26.409897 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:12:26 crc kubenswrapper[5070]: I1213 03:12:26.409908 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:12:26 crc kubenswrapper[5070]: I1213 03:12:26.409927 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 03:12:26 crc kubenswrapper[5070]: I1213 03:12:26.409942 5070 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T03:12:26Z","lastTransitionTime":"2025-12-13T03:12:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 03:12:26 crc kubenswrapper[5070]: I1213 03:12:26.461915 5070 generic.go:334] "Generic (PLEG): container finished" podID="a79c51b6-42c7-41c3-9f2c-39a9a88e54a4" containerID="6fd5a8c7fed2b59ad423840f5552d4e7a6b040d108ed25d56550a04dd951b229" exitCode=0 Dec 13 03:12:26 crc kubenswrapper[5070]: I1213 03:12:26.462010 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-2wg5p" event={"ID":"a79c51b6-42c7-41c3-9f2c-39a9a88e54a4","Type":"ContainerDied","Data":"6fd5a8c7fed2b59ad423840f5552d4e7a6b040d108ed25d56550a04dd951b229"} Dec 13 03:12:26 crc kubenswrapper[5070]: I1213 03:12:26.467609 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7pmp8" event={"ID":"65d45c3c-cac7-4578-b0b5-05e546d8e356","Type":"ContainerStarted","Data":"ede503753ce6d96a69a99c666946f468b83ecde77313d8d29f62b978a7c99e41"} Dec 13 03:12:26 crc kubenswrapper[5070]: I1213 03:12:26.514102 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:12:26 crc kubenswrapper[5070]: I1213 03:12:26.514156 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:12:26 crc kubenswrapper[5070]: I1213 03:12:26.514169 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:12:26 crc kubenswrapper[5070]: I1213 03:12:26.514190 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 03:12:26 crc kubenswrapper[5070]: I1213 03:12:26.514202 5070 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T03:12:26Z","lastTransitionTime":"2025-12-13T03:12:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 03:12:26 crc kubenswrapper[5070]: I1213 03:12:26.616590 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:12:26 crc kubenswrapper[5070]: I1213 03:12:26.616625 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:12:26 crc kubenswrapper[5070]: I1213 03:12:26.616636 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:12:26 crc kubenswrapper[5070]: I1213 03:12:26.616838 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 03:12:26 crc kubenswrapper[5070]: I1213 03:12:26.616912 5070 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T03:12:26Z","lastTransitionTime":"2025-12-13T03:12:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 03:12:26 crc kubenswrapper[5070]: I1213 03:12:26.719848 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:12:26 crc kubenswrapper[5070]: I1213 03:12:26.719884 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:12:26 crc kubenswrapper[5070]: I1213 03:12:26.719893 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:12:26 crc kubenswrapper[5070]: I1213 03:12:26.719908 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 03:12:26 crc kubenswrapper[5070]: I1213 03:12:26.719917 5070 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T03:12:26Z","lastTransitionTime":"2025-12-13T03:12:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 03:12:26 crc kubenswrapper[5070]: I1213 03:12:26.823535 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:12:26 crc kubenswrapper[5070]: I1213 03:12:26.823582 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:12:26 crc kubenswrapper[5070]: I1213 03:12:26.823600 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:12:26 crc kubenswrapper[5070]: I1213 03:12:26.823625 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 03:12:26 crc kubenswrapper[5070]: I1213 03:12:26.823643 5070 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T03:12:26Z","lastTransitionTime":"2025-12-13T03:12:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 03:12:26 crc kubenswrapper[5070]: I1213 03:12:26.925752 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:12:26 crc kubenswrapper[5070]: I1213 03:12:26.925786 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:12:26 crc kubenswrapper[5070]: I1213 03:12:26.925795 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:12:26 crc kubenswrapper[5070]: I1213 03:12:26.925808 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 03:12:26 crc kubenswrapper[5070]: I1213 03:12:26.925817 5070 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T03:12:26Z","lastTransitionTime":"2025-12-13T03:12:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 03:12:27 crc kubenswrapper[5070]: I1213 03:12:27.000615 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 03:12:27 crc kubenswrapper[5070]: I1213 03:12:27.000696 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 03:12:27 crc kubenswrapper[5070]: I1213 03:12:27.000725 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 03:12:27 crc kubenswrapper[5070]: I1213 03:12:27.000751 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 03:12:27 crc kubenswrapper[5070]: I1213 03:12:27.000778 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 03:12:27 crc kubenswrapper[5070]: E1213 03:12:27.000898 5070 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 13 03:12:27 crc kubenswrapper[5070]: E1213 03:12:27.000914 5070 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 13 03:12:27 crc kubenswrapper[5070]: E1213 03:12:27.000928 5070 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 13 03:12:27 crc kubenswrapper[5070]: E1213 03:12:27.000954 5070 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 13 03:12:27 crc kubenswrapper[5070]: E1213 03:12:27.000996 5070 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 13 03:12:27 crc 
kubenswrapper[5070]: E1213 03:12:27.001001 5070 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 13 03:12:27 crc kubenswrapper[5070]: E1213 03:12:27.001162 5070 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 13 03:12:27 crc kubenswrapper[5070]: E1213 03:12:27.000984 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-13 03:12:35.000970026 +0000 UTC m=+47.236813582 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 13 03:12:27 crc kubenswrapper[5070]: E1213 03:12:27.001193 5070 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 13 03:12:27 crc kubenswrapper[5070]: E1213 03:12:27.001218 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-13 03:12:35.001189112 +0000 UTC m=+47.237032678 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 13 03:12:27 crc kubenswrapper[5070]: E1213 03:12:27.001238 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-13 03:12:35.001231593 +0000 UTC m=+47.237075149 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 13 03:12:27 crc kubenswrapper[5070]: E1213 03:12:27.001249 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-13 03:12:35.001244213 +0000 UTC m=+47.237087759 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 13 03:12:27 crc kubenswrapper[5070]: E1213 03:12:27.001402 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 03:12:35.001381447 +0000 UTC m=+47.237225053 (durationBeforeRetry 8s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 03:12:27 crc kubenswrapper[5070]: I1213 03:12:27.028791 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:12:27 crc kubenswrapper[5070]: I1213 03:12:27.028846 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:12:27 crc kubenswrapper[5070]: I1213 03:12:27.028858 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:12:27 crc kubenswrapper[5070]: I1213 03:12:27.028881 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 03:12:27 crc kubenswrapper[5070]: I1213 03:12:27.028895 5070 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T03:12:27Z","lastTransitionTime":"2025-12-13T03:12:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 03:12:27 crc kubenswrapper[5070]: I1213 03:12:27.063286 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 03:12:27 crc kubenswrapper[5070]: I1213 03:12:27.063338 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 03:12:27 crc kubenswrapper[5070]: I1213 03:12:27.063349 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 03:12:27 crc kubenswrapper[5070]: I1213 03:12:27.063367 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 03:12:27 crc kubenswrapper[5070]: I1213 03:12:27.063378 5070 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T03:12:27Z","lastTransitionTime":"2025-12-13T03:12:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 03:12:27 crc kubenswrapper[5070]: I1213 03:12:27.120236 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-8qblf"] Dec 13 03:12:27 crc kubenswrapper[5070]: I1213 03:12:27.120792 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-8qblf" Dec 13 03:12:27 crc kubenswrapper[5070]: I1213 03:12:27.122987 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Dec 13 03:12:27 crc kubenswrapper[5070]: I1213 03:12:27.123179 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Dec 13 03:12:27 crc kubenswrapper[5070]: I1213 03:12:27.123192 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Dec 13 03:12:27 crc kubenswrapper[5070]: I1213 03:12:27.123259 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Dec 13 03:12:27 crc kubenswrapper[5070]: I1213 03:12:27.166283 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 03:12:27 crc kubenswrapper[5070]: I1213 03:12:27.166418 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 03:12:27 crc kubenswrapper[5070]: E1213 03:12:27.166596 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 13 03:12:27 crc kubenswrapper[5070]: I1213 03:12:27.166730 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 03:12:27 crc kubenswrapper[5070]: E1213 03:12:27.166996 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 13 03:12:27 crc kubenswrapper[5070]: E1213 03:12:27.166732 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 13 03:12:27 crc kubenswrapper[5070]: I1213 03:12:27.202700 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d05de5bb-31c5-48dc-9119-60ffacbbe9ae-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-8qblf\" (UID: \"d05de5bb-31c5-48dc-9119-60ffacbbe9ae\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-8qblf" Dec 13 03:12:27 crc kubenswrapper[5070]: I1213 03:12:27.202759 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/d05de5bb-31c5-48dc-9119-60ffacbbe9ae-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-8qblf\" (UID: \"d05de5bb-31c5-48dc-9119-60ffacbbe9ae\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-8qblf" Dec 13 03:12:27 crc kubenswrapper[5070]: I1213 03:12:27.202786 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d05de5bb-31c5-48dc-9119-60ffacbbe9ae-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-8qblf\" (UID: \"d05de5bb-31c5-48dc-9119-60ffacbbe9ae\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-8qblf" Dec 13 03:12:27 crc kubenswrapper[5070]: I1213 03:12:27.202821 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/d05de5bb-31c5-48dc-9119-60ffacbbe9ae-service-ca\") pod \"cluster-version-operator-5c965bbfc6-8qblf\" (UID: \"d05de5bb-31c5-48dc-9119-60ffacbbe9ae\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-8qblf" Dec 13 03:12:27 crc kubenswrapper[5070]: I1213 03:12:27.202838 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/d05de5bb-31c5-48dc-9119-60ffacbbe9ae-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-8qblf\" (UID: \"d05de5bb-31c5-48dc-9119-60ffacbbe9ae\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-8qblf" Dec 13 03:12:27 crc kubenswrapper[5070]: I1213 03:12:27.304021 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d05de5bb-31c5-48dc-9119-60ffacbbe9ae-kube-api-access\") pod 
\"cluster-version-operator-5c965bbfc6-8qblf\" (UID: \"d05de5bb-31c5-48dc-9119-60ffacbbe9ae\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-8qblf" Dec 13 03:12:27 crc kubenswrapper[5070]: I1213 03:12:27.304075 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/d05de5bb-31c5-48dc-9119-60ffacbbe9ae-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-8qblf\" (UID: \"d05de5bb-31c5-48dc-9119-60ffacbbe9ae\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-8qblf" Dec 13 03:12:27 crc kubenswrapper[5070]: I1213 03:12:27.304110 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d05de5bb-31c5-48dc-9119-60ffacbbe9ae-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-8qblf\" (UID: \"d05de5bb-31c5-48dc-9119-60ffacbbe9ae\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-8qblf" Dec 13 03:12:27 crc kubenswrapper[5070]: I1213 03:12:27.304142 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/d05de5bb-31c5-48dc-9119-60ffacbbe9ae-service-ca\") pod \"cluster-version-operator-5c965bbfc6-8qblf\" (UID: \"d05de5bb-31c5-48dc-9119-60ffacbbe9ae\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-8qblf" Dec 13 03:12:27 crc kubenswrapper[5070]: I1213 03:12:27.304167 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/d05de5bb-31c5-48dc-9119-60ffacbbe9ae-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-8qblf\" (UID: \"d05de5bb-31c5-48dc-9119-60ffacbbe9ae\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-8qblf" Dec 13 03:12:27 crc kubenswrapper[5070]: I1213 03:12:27.304234 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/d05de5bb-31c5-48dc-9119-60ffacbbe9ae-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-8qblf\" (UID: \"d05de5bb-31c5-48dc-9119-60ffacbbe9ae\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-8qblf" Dec 13 03:12:27 crc kubenswrapper[5070]: I1213 03:12:27.304546 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/d05de5bb-31c5-48dc-9119-60ffacbbe9ae-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-8qblf\" (UID: \"d05de5bb-31c5-48dc-9119-60ffacbbe9ae\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-8qblf" Dec 13 03:12:27 crc kubenswrapper[5070]: I1213 03:12:27.305732 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/d05de5bb-31c5-48dc-9119-60ffacbbe9ae-service-ca\") pod \"cluster-version-operator-5c965bbfc6-8qblf\" (UID: \"d05de5bb-31c5-48dc-9119-60ffacbbe9ae\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-8qblf" Dec 13 03:12:27 crc kubenswrapper[5070]: I1213 03:12:27.317337 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d05de5bb-31c5-48dc-9119-60ffacbbe9ae-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-8qblf\" (UID: \"d05de5bb-31c5-48dc-9119-60ffacbbe9ae\") " 
pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-8qblf" Dec 13 03:12:27 crc kubenswrapper[5070]: I1213 03:12:27.320811 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d05de5bb-31c5-48dc-9119-60ffacbbe9ae-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-8qblf\" (UID: \"d05de5bb-31c5-48dc-9119-60ffacbbe9ae\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-8qblf" Dec 13 03:12:27 crc kubenswrapper[5070]: I1213 03:12:27.438997 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-8qblf" Dec 13 03:12:27 crc kubenswrapper[5070]: W1213 03:12:27.463305 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd05de5bb_31c5_48dc_9119_60ffacbbe9ae.slice/crio-fe96940bc03316d592ff04d831097354ed74abf585bc682a8c5189ea8f7820f7 WatchSource:0}: Error finding container fe96940bc03316d592ff04d831097354ed74abf585bc682a8c5189ea8f7820f7: Status 404 returned error can't find the container with id fe96940bc03316d592ff04d831097354ed74abf585bc682a8c5189ea8f7820f7 Dec 13 03:12:27 crc kubenswrapper[5070]: I1213 03:12:27.476137 5070 generic.go:334] "Generic (PLEG): container finished" podID="a79c51b6-42c7-41c3-9f2c-39a9a88e54a4" containerID="addde987b9aca5e8c8111c32b7bdf99cfd4114c3fd0c032978f7170915c26ced" exitCode=0 Dec 13 03:12:27 crc kubenswrapper[5070]: I1213 03:12:27.476261 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-2wg5p" event={"ID":"a79c51b6-42c7-41c3-9f2c-39a9a88e54a4","Type":"ContainerDied","Data":"addde987b9aca5e8c8111c32b7bdf99cfd4114c3fd0c032978f7170915c26ced"} Dec 13 03:12:27 crc kubenswrapper[5070]: I1213 03:12:27.479632 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-8qblf" event={"ID":"d05de5bb-31c5-48dc-9119-60ffacbbe9ae","Type":"ContainerStarted","Data":"fe96940bc03316d592ff04d831097354ed74abf585bc682a8c5189ea8f7820f7"} Dec 13 03:12:28 crc kubenswrapper[5070]: I1213 03:12:28.166686 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-tmwbx" Dec 13 03:12:28 crc kubenswrapper[5070]: E1213 03:12:28.168590 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-tmwbx" podUID="86e29ac2-28cb-457d-8f96-5a60c3d535e3" Dec 13 03:12:28 crc kubenswrapper[5070]: I1213 03:12:28.484653 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-8qblf" event={"ID":"d05de5bb-31c5-48dc-9119-60ffacbbe9ae","Type":"ContainerStarted","Data":"f3acf7fa7fc09b89b20d658fd6d49e3fff03cdd9db4cbe72aef05acfebe1892c"} Dec 13 03:12:28 crc kubenswrapper[5070]: I1213 03:12:28.487514 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-2wg5p" event={"ID":"a79c51b6-42c7-41c3-9f2c-39a9a88e54a4","Type":"ContainerStarted","Data":"eb8735bf7c11abd4479e4a486fe9d242c87eba9e64e4d23670c89b23f2021c83"} Dec 13 03:12:28 crc kubenswrapper[5070]: I1213 03:12:28.504702 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-8qblf" podStartSLOduration=7.50468269 podStartE2EDuration="7.50468269s" podCreationTimestamp="2025-12-13 03:12:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 03:12:28.502286107 +0000 UTC m=+40.738129663" watchObservedRunningTime="2025-12-13 03:12:28.50468269 +0000 UTC m=+40.740526256" Dec 13 03:12:29 crc kubenswrapper[5070]: I1213 03:12:29.166170 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 03:12:29 crc kubenswrapper[5070]: E1213 03:12:29.166784 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 13 03:12:29 crc kubenswrapper[5070]: I1213 03:12:29.166200 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 03:12:29 crc kubenswrapper[5070]: E1213 03:12:29.166913 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 13 03:12:29 crc kubenswrapper[5070]: I1213 03:12:29.166200 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 03:12:29 crc kubenswrapper[5070]: E1213 03:12:29.167008 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 13 03:12:29 crc kubenswrapper[5070]: I1213 03:12:29.499789 5070 generic.go:334] "Generic (PLEG): container finished" podID="a79c51b6-42c7-41c3-9f2c-39a9a88e54a4" containerID="eb8735bf7c11abd4479e4a486fe9d242c87eba9e64e4d23670c89b23f2021c83" exitCode=0 Dec 13 03:12:29 crc kubenswrapper[5070]: I1213 03:12:29.499849 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-2wg5p" event={"ID":"a79c51b6-42c7-41c3-9f2c-39a9a88e54a4","Type":"ContainerDied","Data":"eb8735bf7c11abd4479e4a486fe9d242c87eba9e64e4d23670c89b23f2021c83"} Dec 13 03:12:29 crc kubenswrapper[5070]: I1213 03:12:29.505732 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7pmp8" event={"ID":"65d45c3c-cac7-4578-b0b5-05e546d8e356","Type":"ContainerStarted","Data":"91a8a7efc0acce32b579fb842605e71f2d10df826e4376f880e4698d3a139ead"} Dec 13 03:12:29 crc kubenswrapper[5070]: I1213 03:12:29.506131 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-7pmp8" Dec 13 03:12:29 crc kubenswrapper[5070]: I1213 03:12:29.506180 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-7pmp8" Dec 13 03:12:29 crc kubenswrapper[5070]: I1213 03:12:29.566901 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-7pmp8" podStartSLOduration=8.56688074 podStartE2EDuration="8.56688074s" podCreationTimestamp="2025-12-13 03:12:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 03:12:29.565890424 +0000 UTC m=+41.801733970" watchObservedRunningTime="2025-12-13 03:12:29.56688074 +0000 UTC m=+41.802724286" Dec 13 03:12:29 crc kubenswrapper[5070]: I1213 03:12:29.853400 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-7pmp8" Dec 13 03:12:30 crc kubenswrapper[5070]: I1213 03:12:30.035849 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/86e29ac2-28cb-457d-8f96-5a60c3d535e3-metrics-certs\") pod \"network-metrics-daemon-tmwbx\" (UID: \"86e29ac2-28cb-457d-8f96-5a60c3d535e3\") " pod="openshift-multus/network-metrics-daemon-tmwbx" Dec 13 03:12:30 crc kubenswrapper[5070]: E1213 03:12:30.036057 5070 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 13 03:12:30 crc kubenswrapper[5070]: E1213 03:12:30.036115 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/86e29ac2-28cb-457d-8f96-5a60c3d535e3-metrics-certs podName:86e29ac2-28cb-457d-8f96-5a60c3d535e3 nodeName:}" failed. No retries permitted until 2025-12-13 03:12:38.036099076 +0000 UTC m=+50.271942622 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/86e29ac2-28cb-457d-8f96-5a60c3d535e3-metrics-certs") pod "network-metrics-daemon-tmwbx" (UID: "86e29ac2-28cb-457d-8f96-5a60c3d535e3") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 13 03:12:30 crc kubenswrapper[5070]: I1213 03:12:30.167656 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-tmwbx" Dec 13 03:12:30 crc kubenswrapper[5070]: E1213 03:12:30.167793 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-tmwbx" podUID="86e29ac2-28cb-457d-8f96-5a60c3d535e3" Dec 13 03:12:30 crc kubenswrapper[5070]: I1213 03:12:30.510825 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-7pmp8" Dec 13 03:12:30 crc kubenswrapper[5070]: I1213 03:12:30.536959 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-7pmp8" Dec 13 03:12:30 crc kubenswrapper[5070]: I1213 03:12:30.842877 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-tmwbx"] Dec 13 03:12:30 crc kubenswrapper[5070]: I1213 03:12:30.843031 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-tmwbx" Dec 13 03:12:30 crc kubenswrapper[5070]: E1213 03:12:30.843161 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-tmwbx" podUID="86e29ac2-28cb-457d-8f96-5a60c3d535e3" Dec 13 03:12:31 crc kubenswrapper[5070]: I1213 03:12:31.166788 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 03:12:31 crc kubenswrapper[5070]: I1213 03:12:31.166868 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 03:12:31 crc kubenswrapper[5070]: I1213 03:12:31.166897 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 03:12:31 crc kubenswrapper[5070]: E1213 03:12:31.166944 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 13 03:12:31 crc kubenswrapper[5070]: E1213 03:12:31.167138 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 13 03:12:31 crc kubenswrapper[5070]: E1213 03:12:31.167398 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 13 03:12:31 crc kubenswrapper[5070]: I1213 03:12:31.515961 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-2wg5p" event={"ID":"a79c51b6-42c7-41c3-9f2c-39a9a88e54a4","Type":"ContainerStarted","Data":"21f47220ce48fa521068862bc9c247c769f7dc9944cbb695f474c9c9afca5a55"} Dec 13 03:12:31 crc kubenswrapper[5070]: I1213 03:12:31.535522 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-2wg5p" podStartSLOduration=10.535502278 podStartE2EDuration="10.535502278s" podCreationTimestamp="2025-12-13 03:12:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 03:12:31.535011974 +0000 UTC m=+43.770855550" watchObservedRunningTime="2025-12-13 03:12:31.535502278 +0000 UTC m=+43.771345824" Dec 13 03:12:33 crc kubenswrapper[5070]: I1213 03:12:33.166235 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 03:12:33 crc kubenswrapper[5070]: I1213 03:12:33.166301 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 03:12:33 crc kubenswrapper[5070]: I1213 03:12:33.166321 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 03:12:33 crc kubenswrapper[5070]: E1213 03:12:33.167038 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 13 03:12:33 crc kubenswrapper[5070]: E1213 03:12:33.166859 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 13 03:12:33 crc kubenswrapper[5070]: I1213 03:12:33.166527 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-tmwbx" Dec 13 03:12:33 crc kubenswrapper[5070]: E1213 03:12:33.167216 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 13 03:12:33 crc kubenswrapper[5070]: E1213 03:12:33.167311 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-tmwbx" podUID="86e29ac2-28cb-457d-8f96-5a60c3d535e3" Dec 13 03:12:33 crc kubenswrapper[5070]: I1213 03:12:33.849726 5070 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Dec 13 03:12:33 crc kubenswrapper[5070]: I1213 03:12:33.849910 5070 kubelet_node_status.go:538] "Fast updating node status as it just became ready" Dec 13 03:12:33 crc kubenswrapper[5070]: I1213 03:12:33.898383 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-z6jvd"] Dec 13 03:12:33 crc kubenswrapper[5070]: I1213 03:12:33.898814 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-z6jvd" Dec 13 03:12:33 crc kubenswrapper[5070]: I1213 03:12:33.903189 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Dec 13 03:12:33 crc kubenswrapper[5070]: I1213 03:12:33.903714 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-g4gp9"] Dec 13 03:12:33 crc kubenswrapper[5070]: I1213 03:12:33.904318 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Dec 13 03:12:33 crc kubenswrapper[5070]: I1213 03:12:33.904615 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Dec 13 03:12:33 crc kubenswrapper[5070]: I1213 03:12:33.904620 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-s276q"] Dec 13 03:12:33 crc kubenswrapper[5070]: I1213 03:12:33.904286 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-g4gp9" Dec 13 03:12:33 crc kubenswrapper[5070]: I1213 03:12:33.905725 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-s276q" Dec 13 03:12:33 crc kubenswrapper[5070]: I1213 03:12:33.908318 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-qk5h4"] Dec 13 03:12:33 crc kubenswrapper[5070]: I1213 03:12:33.909568 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Dec 13 03:12:33 crc kubenswrapper[5070]: I1213 03:12:33.911959 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-6z6fd"] Dec 13 03:12:33 crc kubenswrapper[5070]: I1213 03:12:33.916553 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-qk5h4" Dec 13 03:12:33 crc kubenswrapper[5070]: I1213 03:12:33.917046 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-6z6fd" Dec 13 03:12:33 crc kubenswrapper[5070]: I1213 03:12:33.917513 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-z445m"] Dec 13 03:12:33 crc kubenswrapper[5070]: I1213 03:12:33.918294 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-z445m" Dec 13 03:12:33 crc kubenswrapper[5070]: I1213 03:12:33.921749 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Dec 13 03:12:33 crc kubenswrapper[5070]: I1213 03:12:33.922090 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Dec 13 03:12:33 crc kubenswrapper[5070]: I1213 03:12:33.925909 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Dec 13 03:12:33 crc kubenswrapper[5070]: I1213 03:12:33.935930 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Dec 13 03:12:33 crc kubenswrapper[5070]: I1213 03:12:33.936647 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Dec 13 03:12:33 crc kubenswrapper[5070]: I1213 03:12:33.936932 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Dec 13 03:12:33 crc kubenswrapper[5070]: I1213 03:12:33.939668 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-jj2l2"] Dec 13 03:12:33 crc kubenswrapper[5070]: I1213 03:12:33.940482 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-jj2l2" Dec 13 03:12:33 crc kubenswrapper[5070]: I1213 03:12:33.942427 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Dec 13 03:12:33 crc kubenswrapper[5070]: I1213 03:12:33.942982 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Dec 13 03:12:33 crc kubenswrapper[5070]: I1213 03:12:33.943067 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Dec 13 03:12:33 crc kubenswrapper[5070]: I1213 03:12:33.943265 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Dec 13 03:12:33 crc kubenswrapper[5070]: I1213 03:12:33.943384 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Dec 13 03:12:33 crc kubenswrapper[5070]: I1213 03:12:33.943519 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Dec 13 03:12:33 crc kubenswrapper[5070]: I1213 03:12:33.943653 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Dec 13 03:12:33 crc kubenswrapper[5070]: I1213 03:12:33.943792 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Dec 13 03:12:33 crc kubenswrapper[5070]: I1213 03:12:33.944033 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Dec 13 03:12:33 crc kubenswrapper[5070]: I1213 03:12:33.944208 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 13 03:12:33 crc kubenswrapper[5070]: I1213 03:12:33.944346 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 13 03:12:33 crc kubenswrapper[5070]: I1213 03:12:33.944402 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-fmcfp"] Dec 13 03:12:33 crc kubenswrapper[5070]: I1213 03:12:33.944524 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Dec 13 03:12:33 crc kubenswrapper[5070]: I1213 03:12:33.944571 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Dec 13 03:12:33 crc kubenswrapper[5070]: I1213 03:12:33.945078 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Dec 13 03:12:33 crc kubenswrapper[5070]: I1213 03:12:33.945164 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-fmcfp" Dec 13 03:12:33 crc kubenswrapper[5070]: I1213 03:12:33.946104 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Dec 13 03:12:33 crc kubenswrapper[5070]: I1213 03:12:33.947103 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Dec 13 03:12:33 crc kubenswrapper[5070]: I1213 03:12:33.947271 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Dec 13 03:12:33 crc kubenswrapper[5070]: I1213 03:12:33.947317 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-lrx9b"] Dec 13 03:12:33 crc kubenswrapper[5070]: I1213 03:12:33.947425 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Dec 13 03:12:33 crc kubenswrapper[5070]: I1213 03:12:33.947622 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Dec 13 03:12:33 crc kubenswrapper[5070]: I1213 03:12:33.947849 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Dec 13 03:12:33 crc kubenswrapper[5070]: I1213 03:12:33.947921 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 13 03:12:33 crc kubenswrapper[5070]: I1213 03:12:33.947985 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 13 03:12:33 crc kubenswrapper[5070]: I1213 03:12:33.947990 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Dec 13 03:12:33 crc kubenswrapper[5070]: I1213 03:12:33.948146 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-lrx9b" Dec 13 03:12:33 crc kubenswrapper[5070]: I1213 03:12:33.951889 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-5clz7"] Dec 13 03:12:33 crc kubenswrapper[5070]: I1213 03:12:33.952476 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-5clz7" Dec 13 03:12:33 crc kubenswrapper[5070]: I1213 03:12:33.952900 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 13 03:12:33 crc kubenswrapper[5070]: I1213 03:12:33.953393 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 13 03:12:33 crc kubenswrapper[5070]: I1213 03:12:33.953730 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Dec 13 03:12:33 crc kubenswrapper[5070]: I1213 03:12:33.954004 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Dec 13 03:12:33 crc kubenswrapper[5070]: I1213 03:12:33.954248 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Dec 13 03:12:33 crc kubenswrapper[5070]: I1213 03:12:33.954551 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Dec 13 03:12:33 crc kubenswrapper[5070]: I1213 03:12:33.955534 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-bjz8x"] Dec 13 03:12:33 crc kubenswrapper[5070]: I1213 03:12:33.955956 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-z8556"] Dec 13 03:12:33 crc kubenswrapper[5070]: I1213 03:12:33.956293 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-z8556" Dec 13 03:12:33 crc kubenswrapper[5070]: I1213 03:12:33.956400 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Dec 13 03:12:33 crc kubenswrapper[5070]: I1213 03:12:33.956583 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/downloads-7954f5f757-bjz8x" Dec 13 03:12:33 crc kubenswrapper[5070]: I1213 03:12:33.957175 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Dec 13 03:12:33 crc kubenswrapper[5070]: I1213 03:12:33.957468 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Dec 13 03:12:33 crc kubenswrapper[5070]: I1213 03:12:33.957615 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Dec 13 03:12:33 crc kubenswrapper[5070]: I1213 03:12:33.957750 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Dec 13 03:12:33 crc kubenswrapper[5070]: I1213 03:12:33.958344 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Dec 13 03:12:33 crc kubenswrapper[5070]: I1213 03:12:33.958523 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Dec 13 03:12:33 crc kubenswrapper[5070]: I1213 03:12:33.958665 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Dec 13 03:12:33 crc kubenswrapper[5070]: I1213 03:12:33.958746 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Dec 13 03:12:33 crc kubenswrapper[5070]: I1213 03:12:33.958827 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Dec 13 03:12:33 crc kubenswrapper[5070]: I1213 03:12:33.958857 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Dec 13 03:12:33 crc kubenswrapper[5070]: I1213 03:12:33.958907 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Dec 13 03:12:33 crc kubenswrapper[5070]: I1213 03:12:33.980516 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-4z9vz"] Dec 13 03:12:33 crc kubenswrapper[5070]: I1213 03:12:33.981917 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-4z9vz" Dec 13 03:12:33 crc kubenswrapper[5070]: I1213 03:12:33.986782 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.003204 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.003707 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.003815 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.003980 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.004113 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.004291 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.004670 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.004874 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.004911 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.005090 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.005142 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.005087 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.005644 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.005701 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-7hxcj"] Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.005909 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c9dbq\" (UniqueName: \"kubernetes.io/projected/7dd4f70a-3e48-4dcd-8e65-a9ee2430dfc4-kube-api-access-c9dbq\") pod \"console-f9d7485db-z6jvd\" (UID: \"7dd4f70a-3e48-4dcd-8e65-a9ee2430dfc4\") " pod="openshift-console/console-f9d7485db-z6jvd" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.005938 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/bf452261-a66d-45bd-9155-55aa347f086a-service-ca-bundle\") pod 
\"router-default-5444994796-z8556\" (UID: \"bf452261-a66d-45bd-9155-55aa347f086a\") " pod="openshift-ingress/router-default-5444994796-z8556" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.005960 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/bf452261-a66d-45bd-9155-55aa347f086a-stats-auth\") pod \"router-default-5444994796-z8556\" (UID: \"bf452261-a66d-45bd-9155-55aa347f086a\") " pod="openshift-ingress/router-default-5444994796-z8556" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.005981 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/56416152-93a7-49cb-a1f0-01577eb6cadc-audit-policies\") pod \"apiserver-7bbb656c7d-s276q\" (UID: \"56416152-93a7-49cb-a1f0-01577eb6cadc\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-s276q" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.005994 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/bf452261-a66d-45bd-9155-55aa347f086a-metrics-certs\") pod \"router-default-5444994796-z8556\" (UID: \"bf452261-a66d-45bd-9155-55aa347f086a\") " pod="openshift-ingress/router-default-5444994796-z8556" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.006011 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/9829d6ca-fc22-4e71-a966-cff569f273fb-images\") pod \"machine-api-operator-5694c8668f-z445m\" (UID: \"9829d6ca-fc22-4e71-a966-cff569f273fb\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-z445m" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.006024 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9ad1cf0d-be40-42ac-adf2-4fd3b3d40d6b-serving-cert\") pod \"console-operator-58897d9998-g4gp9\" (UID: \"9ad1cf0d-be40-42ac-adf2-4fd3b3d40d6b\") " pod="openshift-console-operator/console-operator-58897d9998-g4gp9" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.006047 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9ad1cf0d-be40-42ac-adf2-4fd3b3d40d6b-config\") pod \"console-operator-58897d9998-g4gp9\" (UID: \"9ad1cf0d-be40-42ac-adf2-4fd3b3d40d6b\") " pod="openshift-console-operator/console-operator-58897d9998-g4gp9" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.006060 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e26ec275-42a6-42af-a6ed-0872bd777acf-serving-cert\") pod \"controller-manager-879f6c89f-qk5h4\" (UID: \"e26ec275-42a6-42af-a6ed-0872bd777acf\") " pod="openshift-controller-manager/controller-manager-879f6c89f-qk5h4" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.006078 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/56416152-93a7-49cb-a1f0-01577eb6cadc-encryption-config\") pod \"apiserver-7bbb656c7d-s276q\" (UID: \"56416152-93a7-49cb-a1f0-01577eb6cadc\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-s276q" Dec 13 03:12:34 crc kubenswrapper[5070]: 
I1213 03:12:34.006091 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wx7n6\" (UniqueName: \"kubernetes.io/projected/e26ec275-42a6-42af-a6ed-0872bd777acf-kube-api-access-wx7n6\") pod \"controller-manager-879f6c89f-qk5h4\" (UID: \"e26ec275-42a6-42af-a6ed-0872bd777acf\") " pod="openshift-controller-manager/controller-manager-879f6c89f-qk5h4" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.006108 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4896d2ca-5911-48fd-a2dd-89d8af9ddf3f-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-4z9vz\" (UID: \"4896d2ca-5911-48fd-a2dd-89d8af9ddf3f\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-4z9vz" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.006131 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mlfs4\" (UniqueName: \"kubernetes.io/projected/47eff15d-d8a1-4f98-8579-eb8cdec723ff-kube-api-access-mlfs4\") pod \"cluster-image-registry-operator-dc59b4c8b-5clz7\" (UID: \"47eff15d-d8a1-4f98-8579-eb8cdec723ff\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-5clz7" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.006152 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1a78b35d-1521-4a5a-9cb0-c73064d59f12-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-fmcfp\" (UID: \"1a78b35d-1521-4a5a-9cb0-c73064d59f12\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-fmcfp" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.006167 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4t8rj\" (UniqueName: \"kubernetes.io/projected/9ad1cf0d-be40-42ac-adf2-4fd3b3d40d6b-kube-api-access-4t8rj\") pod \"console-operator-58897d9998-g4gp9\" (UID: \"9ad1cf0d-be40-42ac-adf2-4fd3b3d40d6b\") " pod="openshift-console-operator/console-operator-58897d9998-g4gp9" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.006181 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1a78b35d-1521-4a5a-9cb0-c73064d59f12-config\") pod \"kube-apiserver-operator-766d6c64bb-fmcfp\" (UID: \"1a78b35d-1521-4a5a-9cb0-c73064d59f12\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-fmcfp" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.006195 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/bf452261-a66d-45bd-9155-55aa347f086a-default-certificate\") pod \"router-default-5444994796-z8556\" (UID: \"bf452261-a66d-45bd-9155-55aa347f086a\") " pod="openshift-ingress/router-default-5444994796-z8556" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.006211 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w9wrk\" (UniqueName: \"kubernetes.io/projected/3293f634-1926-4bb8-b639-44d6d14263cb-kube-api-access-w9wrk\") pod \"openshift-config-operator-7777fb866f-jj2l2\" (UID: \"3293f634-1926-4bb8-b639-44d6d14263cb\") " 
pod="openshift-config-operator/openshift-config-operator-7777fb866f-jj2l2" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.006225 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/e26ec275-42a6-42af-a6ed-0872bd777acf-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-qk5h4\" (UID: \"e26ec275-42a6-42af-a6ed-0872bd777acf\") " pod="openshift-controller-manager/controller-manager-879f6c89f-qk5h4" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.006240 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/56416152-93a7-49cb-a1f0-01577eb6cadc-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-s276q\" (UID: \"56416152-93a7-49cb-a1f0-01577eb6cadc\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-s276q" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.006253 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e26ec275-42a6-42af-a6ed-0872bd777acf-client-ca\") pod \"controller-manager-879f6c89f-qk5h4\" (UID: \"e26ec275-42a6-42af-a6ed-0872bd777acf\") " pod="openshift-controller-manager/controller-manager-879f6c89f-qk5h4" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.006268 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1a78b35d-1521-4a5a-9cb0-c73064d59f12-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-fmcfp\" (UID: \"1a78b35d-1521-4a5a-9cb0-c73064d59f12\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-fmcfp" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.006288 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/7dd4f70a-3e48-4dcd-8e65-a9ee2430dfc4-console-serving-cert\") pod \"console-f9d7485db-z6jvd\" (UID: \"7dd4f70a-3e48-4dcd-8e65-a9ee2430dfc4\") " pod="openshift-console/console-f9d7485db-z6jvd" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.006302 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/7dd4f70a-3e48-4dcd-8e65-a9ee2430dfc4-console-oauth-config\") pod \"console-f9d7485db-z6jvd\" (UID: \"7dd4f70a-3e48-4dcd-8e65-a9ee2430dfc4\") " pod="openshift-console/console-f9d7485db-z6jvd" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.006327 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/56416152-93a7-49cb-a1f0-01577eb6cadc-audit-dir\") pod \"apiserver-7bbb656c7d-s276q\" (UID: \"56416152-93a7-49cb-a1f0-01577eb6cadc\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-s276q" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.006344 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/47eff15d-d8a1-4f98-8579-eb8cdec723ff-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-5clz7\" (UID: \"47eff15d-d8a1-4f98-8579-eb8cdec723ff\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-5clz7" Dec 13 03:12:34 crc kubenswrapper[5070]: 
I1213 03:12:34.006362 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/7dd4f70a-3e48-4dcd-8e65-a9ee2430dfc4-service-ca\") pod \"console-f9d7485db-z6jvd\" (UID: \"7dd4f70a-3e48-4dcd-8e65-a9ee2430dfc4\") " pod="openshift-console/console-f9d7485db-z6jvd" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.006376 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vmt77\" (UniqueName: \"kubernetes.io/projected/4896d2ca-5911-48fd-a2dd-89d8af9ddf3f-kube-api-access-vmt77\") pod \"openshift-apiserver-operator-796bbdcf4f-4z9vz\" (UID: \"4896d2ca-5911-48fd-a2dd-89d8af9ddf3f\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-4z9vz" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.006381 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-7hxcj" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.006395 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9829d6ca-fc22-4e71-a966-cff569f273fb-config\") pod \"machine-api-operator-5694c8668f-z445m\" (UID: \"9829d6ca-fc22-4e71-a966-cff569f273fb\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-z445m" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.006410 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/47eff15d-d8a1-4f98-8579-eb8cdec723ff-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-5clz7\" (UID: \"47eff15d-d8a1-4f98-8579-eb8cdec723ff\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-5clz7" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.006426 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/e96df34c-cff8-4655-9f8c-2f0baf4f772c-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-6z6fd\" (UID: \"e96df34c-cff8-4655-9f8c-2f0baf4f772c\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-6z6fd" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.006460 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/56416152-93a7-49cb-a1f0-01577eb6cadc-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-s276q\" (UID: \"56416152-93a7-49cb-a1f0-01577eb6cadc\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-s276q" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.006476 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p2t62\" (UniqueName: \"kubernetes.io/projected/56416152-93a7-49cb-a1f0-01577eb6cadc-kube-api-access-p2t62\") pod \"apiserver-7bbb656c7d-s276q\" (UID: \"56416152-93a7-49cb-a1f0-01577eb6cadc\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-s276q" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.006492 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/9829d6ca-fc22-4e71-a966-cff569f273fb-machine-api-operator-tls\") pod 
\"machine-api-operator-5694c8668f-z445m\" (UID: \"9829d6ca-fc22-4e71-a966-cff569f273fb\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-z445m" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.006507 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/7dd4f70a-3e48-4dcd-8e65-a9ee2430dfc4-console-config\") pod \"console-f9d7485db-z6jvd\" (UID: \"7dd4f70a-3e48-4dcd-8e65-a9ee2430dfc4\") " pod="openshift-console/console-f9d7485db-z6jvd" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.006522 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/56416152-93a7-49cb-a1f0-01577eb6cadc-etcd-client\") pod \"apiserver-7bbb656c7d-s276q\" (UID: \"56416152-93a7-49cb-a1f0-01577eb6cadc\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-s276q" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.006540 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r9lmc\" (UniqueName: \"kubernetes.io/projected/9dffbe71-422b-480e-8bf0-e6b89f6daa88-kube-api-access-r9lmc\") pod \"migrator-59844c95c7-lrx9b\" (UID: \"9dffbe71-422b-480e-8bf0-e6b89f6daa88\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-lrx9b" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.006555 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/47eff15d-d8a1-4f98-8579-eb8cdec723ff-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-5clz7\" (UID: \"47eff15d-d8a1-4f98-8579-eb8cdec723ff\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-5clz7" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.006591 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9ad1cf0d-be40-42ac-adf2-4fd3b3d40d6b-trusted-ca\") pod \"console-operator-58897d9998-g4gp9\" (UID: \"9ad1cf0d-be40-42ac-adf2-4fd3b3d40d6b\") " pod="openshift-console-operator/console-operator-58897d9998-g4gp9" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.006605 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4896d2ca-5911-48fd-a2dd-89d8af9ddf3f-config\") pod \"openshift-apiserver-operator-796bbdcf4f-4z9vz\" (UID: \"4896d2ca-5911-48fd-a2dd-89d8af9ddf3f\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-4z9vz" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.006620 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/56416152-93a7-49cb-a1f0-01577eb6cadc-serving-cert\") pod \"apiserver-7bbb656c7d-s276q\" (UID: \"56416152-93a7-49cb-a1f0-01577eb6cadc\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-s276q" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.006638 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3293f634-1926-4bb8-b639-44d6d14263cb-serving-cert\") pod \"openshift-config-operator-7777fb866f-jj2l2\" (UID: 
\"3293f634-1926-4bb8-b639-44d6d14263cb\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-jj2l2" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.006652 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e26ec275-42a6-42af-a6ed-0872bd777acf-config\") pod \"controller-manager-879f6c89f-qk5h4\" (UID: \"e26ec275-42a6-42af-a6ed-0872bd777acf\") " pod="openshift-controller-manager/controller-manager-879f6c89f-qk5h4" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.006675 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/7dd4f70a-3e48-4dcd-8e65-a9ee2430dfc4-oauth-serving-cert\") pod \"console-f9d7485db-z6jvd\" (UID: \"7dd4f70a-3e48-4dcd-8e65-a9ee2430dfc4\") " pod="openshift-console/console-f9d7485db-z6jvd" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.006694 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jb2dc\" (UniqueName: \"kubernetes.io/projected/bf452261-a66d-45bd-9155-55aa347f086a-kube-api-access-jb2dc\") pod \"router-default-5444994796-z8556\" (UID: \"bf452261-a66d-45bd-9155-55aa347f086a\") " pod="openshift-ingress/router-default-5444994796-z8556" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.006711 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7dd4f70a-3e48-4dcd-8e65-a9ee2430dfc4-trusted-ca-bundle\") pod \"console-f9d7485db-z6jvd\" (UID: \"7dd4f70a-3e48-4dcd-8e65-a9ee2430dfc4\") " pod="openshift-console/console-f9d7485db-z6jvd" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.006727 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mggph\" (UniqueName: \"kubernetes.io/projected/93fc381a-a5cc-4d02-bd2a-ba2898536d45-kube-api-access-mggph\") pod \"downloads-7954f5f757-bjz8x\" (UID: \"93fc381a-a5cc-4d02-bd2a-ba2898536d45\") " pod="openshift-console/downloads-7954f5f757-bjz8x" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.006753 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kv9vk\" (UniqueName: \"kubernetes.io/projected/9829d6ca-fc22-4e71-a966-cff569f273fb-kube-api-access-kv9vk\") pod \"machine-api-operator-5694c8668f-z445m\" (UID: \"9829d6ca-fc22-4e71-a966-cff569f273fb\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-z445m" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.006768 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/3293f634-1926-4bb8-b639-44d6d14263cb-available-featuregates\") pod \"openshift-config-operator-7777fb866f-jj2l2\" (UID: \"3293f634-1926-4bb8-b639-44d6d14263cb\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-jj2l2" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.006784 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mpb6w\" (UniqueName: \"kubernetes.io/projected/e96df34c-cff8-4655-9f8c-2f0baf4f772c-kube-api-access-mpb6w\") pod \"cluster-samples-operator-665b6dd947-6z6fd\" (UID: \"e96df34c-cff8-4655-9f8c-2f0baf4f772c\") " 
pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-6z6fd" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.007154 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-rlmj4"] Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.007511 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.007526 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-rlmj4" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.007678 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-dwkv2"] Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.007961 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.008220 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.008305 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-k6vpl"] Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.008428 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-dwkv2" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.008780 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-k6vpl" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.010843 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.011469 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.011953 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.012059 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.012194 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.012307 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.012548 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.012575 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.012712 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-2sbvn"] Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.012923 5070 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-etcd-operator"/"etcd-operator-config" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.013102 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.013143 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.013582 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-2sbvn" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.014036 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.014361 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.014465 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.014482 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.014515 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.014466 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.014375 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.014935 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.015008 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.015037 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.015113 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.015128 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.015169 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-27s4z"] Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.015259 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.015399 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.015497 5070 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-etcd-operator"/"etcd-ca-bundle" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.015502 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.015529 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.015552 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-27s4z" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.014923 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.015753 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.015879 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.016176 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-6dxqs"] Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.016772 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-6dxqs" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.017084 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-m7blq"] Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.017402 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-m7blq" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.019224 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-5wtk8"] Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.019831 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-5wtk8" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.020416 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-vx28k"] Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.021541 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-xkp8n"] Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.021873 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-vx28k" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.022075 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-xkp8n" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.023743 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-n4gjw"] Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.026415 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-n4gjw" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.026889 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-fhh5m"] Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.028274 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-fhh5m" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.031502 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-6td6p"] Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.039962 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.042779 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-ckvrb"] Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.043161 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-z6jvd"] Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.043264 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-ckvrb" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.043607 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-6td6p" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.043601 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rx5gc"] Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.043982 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.044260 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rx5gc" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.047288 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-6z6fd"] Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.048995 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-g4gp9"] Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.049048 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-wm2b8"] Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.049082 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.049708 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-wm2b8" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.051096 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-grwvc"] Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.051660 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.051817 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-grwvc" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.051946 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.056833 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-s276q"] Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.057497 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-dvc5s"] Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.058164 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-dvc5s" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.060716 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-vblq5"] Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.063569 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-l9t5h"] Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.064965 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-dlrd7"] Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.065145 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.065282 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29426580-zzsjs"] Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.065401 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-vblq5" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.065680 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-l9t5h" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.066107 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-dlrd7" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.068760 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29426580-zzsjs" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.068765 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-kpb6p"] Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.069392 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-n85jm"] Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.069526 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-kpb6p" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.070038 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-n85jm" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.071485 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-h8tb8"] Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.071997 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-h8tb8" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.072280 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/cni-sysctl-allowlist-ds-9chqz"] Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.073015 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/cni-sysctl-allowlist-ds-9chqz" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.074124 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-4z9vz"] Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.075339 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-qk5h4"] Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.075932 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-jgtrr"] Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.076512 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-canary/ingress-canary-jgtrr" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.077332 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-z445m"] Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.081466 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-bjz8x"] Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.083828 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.085648 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-7hxcj"] Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.091062 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-hrb5f"] Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.101151 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-hrb5f" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.103765 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-fmcfp"] Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.108493 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w9wrk\" (UniqueName: \"kubernetes.io/projected/3293f634-1926-4bb8-b639-44d6d14263cb-kube-api-access-w9wrk\") pod \"openshift-config-operator-7777fb866f-jj2l2\" (UID: \"3293f634-1926-4bb8-b639-44d6d14263cb\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-jj2l2" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.108804 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/e26ec275-42a6-42af-a6ed-0872bd777acf-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-qk5h4\" (UID: \"e26ec275-42a6-42af-a6ed-0872bd777acf\") " pod="openshift-controller-manager/controller-manager-879f6c89f-qk5h4" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.108967 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/56416152-93a7-49cb-a1f0-01577eb6cadc-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-s276q\" (UID: \"56416152-93a7-49cb-a1f0-01577eb6cadc\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-s276q" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.108999 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e26ec275-42a6-42af-a6ed-0872bd777acf-client-ca\") pod \"controller-manager-879f6c89f-qk5h4\" (UID: \"e26ec275-42a6-42af-a6ed-0872bd777acf\") " pod="openshift-controller-manager/controller-manager-879f6c89f-qk5h4" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.109022 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1a78b35d-1521-4a5a-9cb0-c73064d59f12-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-fmcfp\" (UID: \"1a78b35d-1521-4a5a-9cb0-c73064d59f12\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-fmcfp" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 
03:12:34.109043 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/7dd4f70a-3e48-4dcd-8e65-a9ee2430dfc4-console-serving-cert\") pod \"console-f9d7485db-z6jvd\" (UID: \"7dd4f70a-3e48-4dcd-8e65-a9ee2430dfc4\") " pod="openshift-console/console-f9d7485db-z6jvd" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.109058 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/7dd4f70a-3e48-4dcd-8e65-a9ee2430dfc4-console-oauth-config\") pod \"console-f9d7485db-z6jvd\" (UID: \"7dd4f70a-3e48-4dcd-8e65-a9ee2430dfc4\") " pod="openshift-console/console-f9d7485db-z6jvd" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.109076 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/56416152-93a7-49cb-a1f0-01577eb6cadc-audit-dir\") pod \"apiserver-7bbb656c7d-s276q\" (UID: \"56416152-93a7-49cb-a1f0-01577eb6cadc\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-s276q" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.109096 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/47eff15d-d8a1-4f98-8579-eb8cdec723ff-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-5clz7\" (UID: \"47eff15d-d8a1-4f98-8579-eb8cdec723ff\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-5clz7" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.109115 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/7dd4f70a-3e48-4dcd-8e65-a9ee2430dfc4-service-ca\") pod \"console-f9d7485db-z6jvd\" (UID: \"7dd4f70a-3e48-4dcd-8e65-a9ee2430dfc4\") " pod="openshift-console/console-f9d7485db-z6jvd" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.109132 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vmt77\" (UniqueName: \"kubernetes.io/projected/4896d2ca-5911-48fd-a2dd-89d8af9ddf3f-kube-api-access-vmt77\") pod \"openshift-apiserver-operator-796bbdcf4f-4z9vz\" (UID: \"4896d2ca-5911-48fd-a2dd-89d8af9ddf3f\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-4z9vz" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.109153 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9829d6ca-fc22-4e71-a966-cff569f273fb-config\") pod \"machine-api-operator-5694c8668f-z445m\" (UID: \"9829d6ca-fc22-4e71-a966-cff569f273fb\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-z445m" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.109171 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/47eff15d-d8a1-4f98-8579-eb8cdec723ff-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-5clz7\" (UID: \"47eff15d-d8a1-4f98-8579-eb8cdec723ff\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-5clz7" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.109192 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/e96df34c-cff8-4655-9f8c-2f0baf4f772c-samples-operator-tls\") pod 
\"cluster-samples-operator-665b6dd947-6z6fd\" (UID: \"e96df34c-cff8-4655-9f8c-2f0baf4f772c\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-6z6fd" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.109220 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/56416152-93a7-49cb-a1f0-01577eb6cadc-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-s276q\" (UID: \"56416152-93a7-49cb-a1f0-01577eb6cadc\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-s276q" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.109241 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p2t62\" (UniqueName: \"kubernetes.io/projected/56416152-93a7-49cb-a1f0-01577eb6cadc-kube-api-access-p2t62\") pod \"apiserver-7bbb656c7d-s276q\" (UID: \"56416152-93a7-49cb-a1f0-01577eb6cadc\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-s276q" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.109260 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/9829d6ca-fc22-4e71-a966-cff569f273fb-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-z445m\" (UID: \"9829d6ca-fc22-4e71-a966-cff569f273fb\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-z445m" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.109277 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/7dd4f70a-3e48-4dcd-8e65-a9ee2430dfc4-console-config\") pod \"console-f9d7485db-z6jvd\" (UID: \"7dd4f70a-3e48-4dcd-8e65-a9ee2430dfc4\") " pod="openshift-console/console-f9d7485db-z6jvd" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.109296 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/56416152-93a7-49cb-a1f0-01577eb6cadc-etcd-client\") pod \"apiserver-7bbb656c7d-s276q\" (UID: \"56416152-93a7-49cb-a1f0-01577eb6cadc\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-s276q" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.109323 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r9lmc\" (UniqueName: \"kubernetes.io/projected/9dffbe71-422b-480e-8bf0-e6b89f6daa88-kube-api-access-r9lmc\") pod \"migrator-59844c95c7-lrx9b\" (UID: \"9dffbe71-422b-480e-8bf0-e6b89f6daa88\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-lrx9b" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.109361 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/47eff15d-d8a1-4f98-8579-eb8cdec723ff-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-5clz7\" (UID: \"47eff15d-d8a1-4f98-8579-eb8cdec723ff\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-5clz7" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.109827 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9ad1cf0d-be40-42ac-adf2-4fd3b3d40d6b-trusted-ca\") pod \"console-operator-58897d9998-g4gp9\" (UID: \"9ad1cf0d-be40-42ac-adf2-4fd3b3d40d6b\") " pod="openshift-console-operator/console-operator-58897d9998-g4gp9" Dec 13 03:12:34 crc 
kubenswrapper[5070]: I1213 03:12:34.109867 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4896d2ca-5911-48fd-a2dd-89d8af9ddf3f-config\") pod \"openshift-apiserver-operator-796bbdcf4f-4z9vz\" (UID: \"4896d2ca-5911-48fd-a2dd-89d8af9ddf3f\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-4z9vz" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.109905 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/56416152-93a7-49cb-a1f0-01577eb6cadc-serving-cert\") pod \"apiserver-7bbb656c7d-s276q\" (UID: \"56416152-93a7-49cb-a1f0-01577eb6cadc\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-s276q" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.109934 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3293f634-1926-4bb8-b639-44d6d14263cb-serving-cert\") pod \"openshift-config-operator-7777fb866f-jj2l2\" (UID: \"3293f634-1926-4bb8-b639-44d6d14263cb\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-jj2l2" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.109966 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e26ec275-42a6-42af-a6ed-0872bd777acf-config\") pod \"controller-manager-879f6c89f-qk5h4\" (UID: \"e26ec275-42a6-42af-a6ed-0872bd777acf\") " pod="openshift-controller-manager/controller-manager-879f6c89f-qk5h4" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.110010 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/7dd4f70a-3e48-4dcd-8e65-a9ee2430dfc4-oauth-serving-cert\") pod \"console-f9d7485db-z6jvd\" (UID: \"7dd4f70a-3e48-4dcd-8e65-a9ee2430dfc4\") " pod="openshift-console/console-f9d7485db-z6jvd" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.110042 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jb2dc\" (UniqueName: \"kubernetes.io/projected/bf452261-a66d-45bd-9155-55aa347f086a-kube-api-access-jb2dc\") pod \"router-default-5444994796-z8556\" (UID: \"bf452261-a66d-45bd-9155-55aa347f086a\") " pod="openshift-ingress/router-default-5444994796-z8556" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.110133 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.110267 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-5clz7"] Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.110360 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/7dd4f70a-3e48-4dcd-8e65-a9ee2430dfc4-service-ca\") pod \"console-f9d7485db-z6jvd\" (UID: \"7dd4f70a-3e48-4dcd-8e65-a9ee2430dfc4\") " pod="openshift-console/console-f9d7485db-z6jvd" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.110410 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/e26ec275-42a6-42af-a6ed-0872bd777acf-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-qk5h4\" (UID: \"e26ec275-42a6-42af-a6ed-0872bd777acf\") " 
pod="openshift-controller-manager/controller-manager-879f6c89f-qk5h4" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.110826 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/56416152-93a7-49cb-a1f0-01577eb6cadc-audit-dir\") pod \"apiserver-7bbb656c7d-s276q\" (UID: \"56416152-93a7-49cb-a1f0-01577eb6cadc\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-s276q" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.111977 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/47eff15d-d8a1-4f98-8579-eb8cdec723ff-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-5clz7\" (UID: \"47eff15d-d8a1-4f98-8579-eb8cdec723ff\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-5clz7" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.112609 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e26ec275-42a6-42af-a6ed-0872bd777acf-client-ca\") pod \"controller-manager-879f6c89f-qk5h4\" (UID: \"e26ec275-42a6-42af-a6ed-0872bd777acf\") " pod="openshift-controller-manager/controller-manager-879f6c89f-qk5h4" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.112727 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7dd4f70a-3e48-4dcd-8e65-a9ee2430dfc4-trusted-ca-bundle\") pod \"console-f9d7485db-z6jvd\" (UID: \"7dd4f70a-3e48-4dcd-8e65-a9ee2430dfc4\") " pod="openshift-console/console-f9d7485db-z6jvd" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.113033 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9829d6ca-fc22-4e71-a966-cff569f273fb-config\") pod \"machine-api-operator-5694c8668f-z445m\" (UID: \"9829d6ca-fc22-4e71-a966-cff569f273fb\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-z445m" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.113093 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mggph\" (UniqueName: \"kubernetes.io/projected/93fc381a-a5cc-4d02-bd2a-ba2898536d45-kube-api-access-mggph\") pod \"downloads-7954f5f757-bjz8x\" (UID: \"93fc381a-a5cc-4d02-bd2a-ba2898536d45\") " pod="openshift-console/downloads-7954f5f757-bjz8x" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.113381 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kv9vk\" (UniqueName: \"kubernetes.io/projected/9829d6ca-fc22-4e71-a966-cff569f273fb-kube-api-access-kv9vk\") pod \"machine-api-operator-5694c8668f-z445m\" (UID: \"9829d6ca-fc22-4e71-a966-cff569f273fb\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-z445m" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.113574 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/56416152-93a7-49cb-a1f0-01577eb6cadc-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-s276q\" (UID: \"56416152-93a7-49cb-a1f0-01577eb6cadc\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-s276q" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.113278 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-wm2b8"] Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 
03:12:34.113422 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/3293f634-1926-4bb8-b639-44d6d14263cb-available-featuregates\") pod \"openshift-config-operator-7777fb866f-jj2l2\" (UID: \"3293f634-1926-4bb8-b639-44d6d14263cb\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-jj2l2" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.113825 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mpb6w\" (UniqueName: \"kubernetes.io/projected/e96df34c-cff8-4655-9f8c-2f0baf4f772c-kube-api-access-mpb6w\") pod \"cluster-samples-operator-665b6dd947-6z6fd\" (UID: \"e96df34c-cff8-4655-9f8c-2f0baf4f772c\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-6z6fd" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.113879 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zkjbr\" (UniqueName: \"kubernetes.io/projected/6dca72ea-876f-4b17-90ee-62f8287ea7d6-kube-api-access-zkjbr\") pod \"service-ca-operator-777779d784-dvc5s\" (UID: \"6dca72ea-876f-4b17-90ee-62f8287ea7d6\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-dvc5s" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.114050 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/3293f634-1926-4bb8-b639-44d6d14263cb-available-featuregates\") pod \"openshift-config-operator-7777fb866f-jj2l2\" (UID: \"3293f634-1926-4bb8-b639-44d6d14263cb\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-jj2l2" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.114247 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/56416152-93a7-49cb-a1f0-01577eb6cadc-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-s276q\" (UID: \"56416152-93a7-49cb-a1f0-01577eb6cadc\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-s276q" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.114795 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7dd4f70a-3e48-4dcd-8e65-a9ee2430dfc4-trusted-ca-bundle\") pod \"console-f9d7485db-z6jvd\" (UID: \"7dd4f70a-3e48-4dcd-8e65-a9ee2430dfc4\") " pod="openshift-console/console-f9d7485db-z6jvd" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.115523 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/7dd4f70a-3e48-4dcd-8e65-a9ee2430dfc4-oauth-serving-cert\") pod \"console-f9d7485db-z6jvd\" (UID: \"7dd4f70a-3e48-4dcd-8e65-a9ee2430dfc4\") " pod="openshift-console/console-f9d7485db-z6jvd" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.115978 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c9dbq\" (UniqueName: \"kubernetes.io/projected/7dd4f70a-3e48-4dcd-8e65-a9ee2430dfc4-kube-api-access-c9dbq\") pod \"console-f9d7485db-z6jvd\" (UID: \"7dd4f70a-3e48-4dcd-8e65-a9ee2430dfc4\") " pod="openshift-console/console-f9d7485db-z6jvd" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.116034 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/bf452261-a66d-45bd-9155-55aa347f086a-service-ca-bundle\") pod \"router-default-5444994796-z8556\" (UID: \"bf452261-a66d-45bd-9155-55aa347f086a\") " pod="openshift-ingress/router-default-5444994796-z8556" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.116109 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4896d2ca-5911-48fd-a2dd-89d8af9ddf3f-config\") pod \"openshift-apiserver-operator-796bbdcf4f-4z9vz\" (UID: \"4896d2ca-5911-48fd-a2dd-89d8af9ddf3f\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-4z9vz" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.116280 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/e96df34c-cff8-4655-9f8c-2f0baf4f772c-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-6z6fd\" (UID: \"e96df34c-cff8-4655-9f8c-2f0baf4f772c\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-6z6fd" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.116368 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/7dd4f70a-3e48-4dcd-8e65-a9ee2430dfc4-console-config\") pod \"console-f9d7485db-z6jvd\" (UID: \"7dd4f70a-3e48-4dcd-8e65-a9ee2430dfc4\") " pod="openshift-console/console-f9d7485db-z6jvd" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.116475 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/bf452261-a66d-45bd-9155-55aa347f086a-stats-auth\") pod \"router-default-5444994796-z8556\" (UID: \"bf452261-a66d-45bd-9155-55aa347f086a\") " pod="openshift-ingress/router-default-5444994796-z8556" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.116542 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/56416152-93a7-49cb-a1f0-01577eb6cadc-audit-policies\") pod \"apiserver-7bbb656c7d-s276q\" (UID: \"56416152-93a7-49cb-a1f0-01577eb6cadc\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-s276q" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.116543 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e26ec275-42a6-42af-a6ed-0872bd777acf-config\") pod \"controller-manager-879f6c89f-qk5h4\" (UID: \"e26ec275-42a6-42af-a6ed-0872bd777acf\") " pod="openshift-controller-manager/controller-manager-879f6c89f-qk5h4" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.116576 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/bf452261-a66d-45bd-9155-55aa347f086a-metrics-certs\") pod \"router-default-5444994796-z8556\" (UID: \"bf452261-a66d-45bd-9155-55aa347f086a\") " pod="openshift-ingress/router-default-5444994796-z8556" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.116970 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/bf452261-a66d-45bd-9155-55aa347f086a-service-ca-bundle\") pod \"router-default-5444994796-z8556\" (UID: \"bf452261-a66d-45bd-9155-55aa347f086a\") " pod="openshift-ingress/router-default-5444994796-z8556" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.117524 5070 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9ad1cf0d-be40-42ac-adf2-4fd3b3d40d6b-trusted-ca\") pod \"console-operator-58897d9998-g4gp9\" (UID: \"9ad1cf0d-be40-42ac-adf2-4fd3b3d40d6b\") " pod="openshift-console-operator/console-operator-58897d9998-g4gp9" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.117560 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/56416152-93a7-49cb-a1f0-01577eb6cadc-audit-policies\") pod \"apiserver-7bbb656c7d-s276q\" (UID: \"56416152-93a7-49cb-a1f0-01577eb6cadc\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-s276q" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.118553 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/47eff15d-d8a1-4f98-8579-eb8cdec723ff-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-5clz7\" (UID: \"47eff15d-d8a1-4f98-8579-eb8cdec723ff\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-5clz7" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.118815 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6dca72ea-876f-4b17-90ee-62f8287ea7d6-serving-cert\") pod \"service-ca-operator-777779d784-dvc5s\" (UID: \"6dca72ea-876f-4b17-90ee-62f8287ea7d6\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-dvc5s" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.118887 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/9829d6ca-fc22-4e71-a966-cff569f273fb-images\") pod \"machine-api-operator-5694c8668f-z445m\" (UID: \"9829d6ca-fc22-4e71-a966-cff569f273fb\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-z445m" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.118912 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9ad1cf0d-be40-42ac-adf2-4fd3b3d40d6b-serving-cert\") pod \"console-operator-58897d9998-g4gp9\" (UID: \"9ad1cf0d-be40-42ac-adf2-4fd3b3d40d6b\") " pod="openshift-console-operator/console-operator-58897d9998-g4gp9" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.118982 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9ad1cf0d-be40-42ac-adf2-4fd3b3d40d6b-config\") pod \"console-operator-58897d9998-g4gp9\" (UID: \"9ad1cf0d-be40-42ac-adf2-4fd3b3d40d6b\") " pod="openshift-console-operator/console-operator-58897d9998-g4gp9" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.119086 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e26ec275-42a6-42af-a6ed-0872bd777acf-serving-cert\") pod \"controller-manager-879f6c89f-qk5h4\" (UID: \"e26ec275-42a6-42af-a6ed-0872bd777acf\") " pod="openshift-controller-manager/controller-manager-879f6c89f-qk5h4" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.119149 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/56416152-93a7-49cb-a1f0-01577eb6cadc-encryption-config\") pod \"apiserver-7bbb656c7d-s276q\" (UID: 
\"56416152-93a7-49cb-a1f0-01577eb6cadc\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-s276q" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.119973 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/56416152-93a7-49cb-a1f0-01577eb6cadc-serving-cert\") pod \"apiserver-7bbb656c7d-s276q\" (UID: \"56416152-93a7-49cb-a1f0-01577eb6cadc\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-s276q" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.120022 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9ad1cf0d-be40-42ac-adf2-4fd3b3d40d6b-config\") pod \"console-operator-58897d9998-g4gp9\" (UID: \"9ad1cf0d-be40-42ac-adf2-4fd3b3d40d6b\") " pod="openshift-console-operator/console-operator-58897d9998-g4gp9" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.120550 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wx7n6\" (UniqueName: \"kubernetes.io/projected/e26ec275-42a6-42af-a6ed-0872bd777acf-kube-api-access-wx7n6\") pod \"controller-manager-879f6c89f-qk5h4\" (UID: \"e26ec275-42a6-42af-a6ed-0872bd777acf\") " pod="openshift-controller-manager/controller-manager-879f6c89f-qk5h4" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.120615 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/9829d6ca-fc22-4e71-a966-cff569f273fb-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-z445m\" (UID: \"9829d6ca-fc22-4e71-a966-cff569f273fb\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-z445m" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.120656 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4896d2ca-5911-48fd-a2dd-89d8af9ddf3f-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-4z9vz\" (UID: \"4896d2ca-5911-48fd-a2dd-89d8af9ddf3f\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-4z9vz" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.120710 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6dca72ea-876f-4b17-90ee-62f8287ea7d6-config\") pod \"service-ca-operator-777779d784-dvc5s\" (UID: \"6dca72ea-876f-4b17-90ee-62f8287ea7d6\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-dvc5s" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.121523 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mlfs4\" (UniqueName: \"kubernetes.io/projected/47eff15d-d8a1-4f98-8579-eb8cdec723ff-kube-api-access-mlfs4\") pod \"cluster-image-registry-operator-dc59b4c8b-5clz7\" (UID: \"47eff15d-d8a1-4f98-8579-eb8cdec723ff\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-5clz7" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.121989 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/bf452261-a66d-45bd-9155-55aa347f086a-metrics-certs\") pod \"router-default-5444994796-z8556\" (UID: \"bf452261-a66d-45bd-9155-55aa347f086a\") " pod="openshift-ingress/router-default-5444994796-z8556" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.122021 5070 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/56416152-93a7-49cb-a1f0-01577eb6cadc-etcd-client\") pod \"apiserver-7bbb656c7d-s276q\" (UID: \"56416152-93a7-49cb-a1f0-01577eb6cadc\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-s276q" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.122066 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3293f634-1926-4bb8-b639-44d6d14263cb-serving-cert\") pod \"openshift-config-operator-7777fb866f-jj2l2\" (UID: \"3293f634-1926-4bb8-b639-44d6d14263cb\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-jj2l2" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.122067 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/9829d6ca-fc22-4e71-a966-cff569f273fb-images\") pod \"machine-api-operator-5694c8668f-z445m\" (UID: \"9829d6ca-fc22-4e71-a966-cff569f273fb\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-z445m" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.122686 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-n4gjw"] Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.122779 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/bf452261-a66d-45bd-9155-55aa347f086a-stats-auth\") pod \"router-default-5444994796-z8556\" (UID: \"bf452261-a66d-45bd-9155-55aa347f086a\") " pod="openshift-ingress/router-default-5444994796-z8556" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.123280 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9ad1cf0d-be40-42ac-adf2-4fd3b3d40d6b-serving-cert\") pod \"console-operator-58897d9998-g4gp9\" (UID: \"9ad1cf0d-be40-42ac-adf2-4fd3b3d40d6b\") " pod="openshift-console-operator/console-operator-58897d9998-g4gp9" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.123956 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-k6vpl"] Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.124103 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1a78b35d-1521-4a5a-9cb0-c73064d59f12-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-fmcfp\" (UID: \"1a78b35d-1521-4a5a-9cb0-c73064d59f12\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-fmcfp" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.124265 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/56416152-93a7-49cb-a1f0-01577eb6cadc-encryption-config\") pod \"apiserver-7bbb656c7d-s276q\" (UID: \"56416152-93a7-49cb-a1f0-01577eb6cadc\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-s276q" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.121891 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1a78b35d-1521-4a5a-9cb0-c73064d59f12-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-fmcfp\" (UID: \"1a78b35d-1521-4a5a-9cb0-c73064d59f12\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-fmcfp" Dec 13 
03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.124585 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4t8rj\" (UniqueName: \"kubernetes.io/projected/9ad1cf0d-be40-42ac-adf2-4fd3b3d40d6b-kube-api-access-4t8rj\") pod \"console-operator-58897d9998-g4gp9\" (UID: \"9ad1cf0d-be40-42ac-adf2-4fd3b3d40d6b\") " pod="openshift-console-operator/console-operator-58897d9998-g4gp9" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.124618 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1a78b35d-1521-4a5a-9cb0-c73064d59f12-config\") pod \"kube-apiserver-operator-766d6c64bb-fmcfp\" (UID: \"1a78b35d-1521-4a5a-9cb0-c73064d59f12\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-fmcfp" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.124642 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/bf452261-a66d-45bd-9155-55aa347f086a-default-certificate\") pod \"router-default-5444994796-z8556\" (UID: \"bf452261-a66d-45bd-9155-55aa347f086a\") " pod="openshift-ingress/router-default-5444994796-z8556" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.125169 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-2sbvn"] Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.125340 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1a78b35d-1521-4a5a-9cb0-c73064d59f12-config\") pod \"kube-apiserver-operator-766d6c64bb-fmcfp\" (UID: \"1a78b35d-1521-4a5a-9cb0-c73064d59f12\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-fmcfp" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.126081 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.126285 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4896d2ca-5911-48fd-a2dd-89d8af9ddf3f-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-4z9vz\" (UID: \"4896d2ca-5911-48fd-a2dd-89d8af9ddf3f\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-4z9vz" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.126391 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-l9t5h"] Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.127642 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/7dd4f70a-3e48-4dcd-8e65-a9ee2430dfc4-console-serving-cert\") pod \"console-f9d7485db-z6jvd\" (UID: \"7dd4f70a-3e48-4dcd-8e65-a9ee2430dfc4\") " pod="openshift-console/console-f9d7485db-z6jvd" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.128552 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-jj2l2"] Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.128672 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/bf452261-a66d-45bd-9155-55aa347f086a-default-certificate\") pod \"router-default-5444994796-z8556\" (UID: 
\"bf452261-a66d-45bd-9155-55aa347f086a\") " pod="openshift-ingress/router-default-5444994796-z8556" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.129976 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-xkp8n"] Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.131239 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-dwkv2"] Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.132530 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rx5gc"] Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.133330 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/7dd4f70a-3e48-4dcd-8e65-a9ee2430dfc4-console-oauth-config\") pod \"console-f9d7485db-z6jvd\" (UID: \"7dd4f70a-3e48-4dcd-8e65-a9ee2430dfc4\") " pod="openshift-console/console-f9d7485db-z6jvd" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.134529 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-vx28k"] Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.135637 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-dvc5s"] Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.136812 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-27s4z"] Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.137843 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-6dxqs"] Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.139017 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-lrx9b"] Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.139966 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-m7blq"] Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.140741 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.141069 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-dlrd7"] Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.142077 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-vblq5"] Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.143578 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-ckvrb"] Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.144337 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-fhh5m"] Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.145531 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-rlmj4"] Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.146537 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-grwvc"] Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.147478 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-kpb6p"] Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.148422 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-6td6p"] Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.149207 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29426580-zzsjs"] Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.150124 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-6fssx"] Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.151141 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e26ec275-42a6-42af-a6ed-0872bd777acf-serving-cert\") pod \"controller-manager-879f6c89f-qk5h4\" (UID: \"e26ec275-42a6-42af-a6ed-0872bd777acf\") " pod="openshift-controller-manager/controller-manager-879f6c89f-qk5h4" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.152101 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-jgtrr"] Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.152120 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-h8tb8"] Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.152197 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-6fssx" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.153055 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-n85jm"] Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.154960 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-6fssx"] Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.161092 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.166684 5070 scope.go:117] "RemoveContainer" containerID="bee949adc33cab61eb1d70c6cecda6bbbf42616b431d2af90fcf71854407da41" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.180963 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.201850 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.241870 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/e0009fee-6991-4819-ae3d-2d075aa961af-audit-dir\") pod \"oauth-openshift-558db77b4-dwkv2\" (UID: \"e0009fee-6991-4819-ae3d-2d075aa961af\") " pod="openshift-authentication/oauth-openshift-558db77b4-dwkv2" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.241989 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6dca72ea-876f-4b17-90ee-62f8287ea7d6-config\") pod \"service-ca-operator-777779d784-dvc5s\" 
(UID: \"6dca72ea-876f-4b17-90ee-62f8287ea7d6\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-dvc5s" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.242107 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nvcfp\" (UniqueName: \"kubernetes.io/projected/e0009fee-6991-4819-ae3d-2d075aa961af-kube-api-access-nvcfp\") pod \"oauth-openshift-558db77b4-dwkv2\" (UID: \"e0009fee-6991-4819-ae3d-2d075aa961af\") " pod="openshift-authentication/oauth-openshift-558db77b4-dwkv2" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.242136 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m65zn\" (UniqueName: \"kubernetes.io/projected/725fea52-a164-4df9-9bbf-13cda9f52cb6-kube-api-access-m65zn\") pod \"kube-storage-version-migrator-operator-b67b599dd-6dxqs\" (UID: \"725fea52-a164-4df9-9bbf-13cda9f52cb6\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-6dxqs" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.242178 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/3cd4a611-b694-4ee2-9290-b62ad8854af3-profile-collector-cert\") pod \"olm-operator-6b444d44fb-rx5gc\" (UID: \"3cd4a611-b694-4ee2-9290-b62ad8854af3\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rx5gc" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.242247 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2d4s6\" (UniqueName: \"kubernetes.io/projected/3cd4a611-b694-4ee2-9290-b62ad8854af3-kube-api-access-2d4s6\") pod \"olm-operator-6b444d44fb-rx5gc\" (UID: \"3cd4a611-b694-4ee2-9290-b62ad8854af3\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rx5gc" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.242286 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e0009fee-6991-4819-ae3d-2d075aa961af-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-dwkv2\" (UID: \"e0009fee-6991-4819-ae3d-2d075aa961af\") " pod="openshift-authentication/oauth-openshift-558db77b4-dwkv2" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.242326 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/e0009fee-6991-4819-ae3d-2d075aa961af-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-dwkv2\" (UID: \"e0009fee-6991-4819-ae3d-2d075aa961af\") " pod="openshift-authentication/oauth-openshift-558db77b4-dwkv2" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.242347 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/e0009fee-6991-4819-ae3d-2d075aa961af-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-dwkv2\" (UID: \"e0009fee-6991-4819-ae3d-2d075aa961af\") " pod="openshift-authentication/oauth-openshift-558db77b4-dwkv2" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.242370 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/e0009fee-6991-4819-ae3d-2d075aa961af-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-dwkv2\" (UID: \"e0009fee-6991-4819-ae3d-2d075aa961af\") " pod="openshift-authentication/oauth-openshift-558db77b4-dwkv2" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.242402 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/725fea52-a164-4df9-9bbf-13cda9f52cb6-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-6dxqs\" (UID: \"725fea52-a164-4df9-9bbf-13cda9f52cb6\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-6dxqs" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.242428 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/3cd4a611-b694-4ee2-9290-b62ad8854af3-srv-cert\") pod \"olm-operator-6b444d44fb-rx5gc\" (UID: \"3cd4a611-b694-4ee2-9290-b62ad8854af3\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rx5gc" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.242473 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/e0009fee-6991-4819-ae3d-2d075aa961af-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-dwkv2\" (UID: \"e0009fee-6991-4819-ae3d-2d075aa961af\") " pod="openshift-authentication/oauth-openshift-558db77b4-dwkv2" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.242512 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/e0009fee-6991-4819-ae3d-2d075aa961af-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-dwkv2\" (UID: \"e0009fee-6991-4819-ae3d-2d075aa961af\") " pod="openshift-authentication/oauth-openshift-558db77b4-dwkv2" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.242533 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/725fea52-a164-4df9-9bbf-13cda9f52cb6-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-6dxqs\" (UID: \"725fea52-a164-4df9-9bbf-13cda9f52cb6\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-6dxqs" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.242588 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/e0009fee-6991-4819-ae3d-2d075aa961af-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-dwkv2\" (UID: \"e0009fee-6991-4819-ae3d-2d075aa961af\") " pod="openshift-authentication/oauth-openshift-558db77b4-dwkv2" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.242624 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/e0009fee-6991-4819-ae3d-2d075aa961af-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-dwkv2\" (UID: \"e0009fee-6991-4819-ae3d-2d075aa961af\") " 
pod="openshift-authentication/oauth-openshift-558db77b4-dwkv2" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.242661 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/e0009fee-6991-4819-ae3d-2d075aa961af-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-dwkv2\" (UID: \"e0009fee-6991-4819-ae3d-2d075aa961af\") " pod="openshift-authentication/oauth-openshift-558db77b4-dwkv2" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.242741 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zkjbr\" (UniqueName: \"kubernetes.io/projected/6dca72ea-876f-4b17-90ee-62f8287ea7d6-kube-api-access-zkjbr\") pod \"service-ca-operator-777779d784-dvc5s\" (UID: \"6dca72ea-876f-4b17-90ee-62f8287ea7d6\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-dvc5s" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.242942 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6dca72ea-876f-4b17-90ee-62f8287ea7d6-serving-cert\") pod \"service-ca-operator-777779d784-dvc5s\" (UID: \"6dca72ea-876f-4b17-90ee-62f8287ea7d6\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-dvc5s" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.242975 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/e0009fee-6991-4819-ae3d-2d075aa961af-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-dwkv2\" (UID: \"e0009fee-6991-4819-ae3d-2d075aa961af\") " pod="openshift-authentication/oauth-openshift-558db77b4-dwkv2" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.243013 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/e0009fee-6991-4819-ae3d-2d075aa961af-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-dwkv2\" (UID: \"e0009fee-6991-4819-ae3d-2d075aa961af\") " pod="openshift-authentication/oauth-openshift-558db77b4-dwkv2" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.243044 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/e0009fee-6991-4819-ae3d-2d075aa961af-audit-policies\") pod \"oauth-openshift-558db77b4-dwkv2\" (UID: \"e0009fee-6991-4819-ae3d-2d075aa961af\") " pod="openshift-authentication/oauth-openshift-558db77b4-dwkv2" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.244672 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.245320 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.261159 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.280787 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.301284 5070 
reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.321873 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.341244 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.343914 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nvcfp\" (UniqueName: \"kubernetes.io/projected/e0009fee-6991-4819-ae3d-2d075aa961af-kube-api-access-nvcfp\") pod \"oauth-openshift-558db77b4-dwkv2\" (UID: \"e0009fee-6991-4819-ae3d-2d075aa961af\") " pod="openshift-authentication/oauth-openshift-558db77b4-dwkv2" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.343954 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m65zn\" (UniqueName: \"kubernetes.io/projected/725fea52-a164-4df9-9bbf-13cda9f52cb6-kube-api-access-m65zn\") pod \"kube-storage-version-migrator-operator-b67b599dd-6dxqs\" (UID: \"725fea52-a164-4df9-9bbf-13cda9f52cb6\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-6dxqs" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.344006 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/3cd4a611-b694-4ee2-9290-b62ad8854af3-profile-collector-cert\") pod \"olm-operator-6b444d44fb-rx5gc\" (UID: \"3cd4a611-b694-4ee2-9290-b62ad8854af3\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rx5gc" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.344030 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2d4s6\" (UniqueName: \"kubernetes.io/projected/3cd4a611-b694-4ee2-9290-b62ad8854af3-kube-api-access-2d4s6\") pod \"olm-operator-6b444d44fb-rx5gc\" (UID: \"3cd4a611-b694-4ee2-9290-b62ad8854af3\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rx5gc" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.344089 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e0009fee-6991-4819-ae3d-2d075aa961af-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-dwkv2\" (UID: \"e0009fee-6991-4819-ae3d-2d075aa961af\") " pod="openshift-authentication/oauth-openshift-558db77b4-dwkv2" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.344118 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/e0009fee-6991-4819-ae3d-2d075aa961af-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-dwkv2\" (UID: \"e0009fee-6991-4819-ae3d-2d075aa961af\") " pod="openshift-authentication/oauth-openshift-558db77b4-dwkv2" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.344140 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/e0009fee-6991-4819-ae3d-2d075aa961af-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-dwkv2\" (UID: \"e0009fee-6991-4819-ae3d-2d075aa961af\") " 
pod="openshift-authentication/oauth-openshift-558db77b4-dwkv2" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.344161 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/e0009fee-6991-4819-ae3d-2d075aa961af-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-dwkv2\" (UID: \"e0009fee-6991-4819-ae3d-2d075aa961af\") " pod="openshift-authentication/oauth-openshift-558db77b4-dwkv2" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.344192 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/725fea52-a164-4df9-9bbf-13cda9f52cb6-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-6dxqs\" (UID: \"725fea52-a164-4df9-9bbf-13cda9f52cb6\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-6dxqs" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.344217 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/3cd4a611-b694-4ee2-9290-b62ad8854af3-srv-cert\") pod \"olm-operator-6b444d44fb-rx5gc\" (UID: \"3cd4a611-b694-4ee2-9290-b62ad8854af3\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rx5gc" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.344242 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/e0009fee-6991-4819-ae3d-2d075aa961af-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-dwkv2\" (UID: \"e0009fee-6991-4819-ae3d-2d075aa961af\") " pod="openshift-authentication/oauth-openshift-558db77b4-dwkv2" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.344283 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/e0009fee-6991-4819-ae3d-2d075aa961af-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-dwkv2\" (UID: \"e0009fee-6991-4819-ae3d-2d075aa961af\") " pod="openshift-authentication/oauth-openshift-558db77b4-dwkv2" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.344303 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/725fea52-a164-4df9-9bbf-13cda9f52cb6-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-6dxqs\" (UID: \"725fea52-a164-4df9-9bbf-13cda9f52cb6\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-6dxqs" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.344335 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/e0009fee-6991-4819-ae3d-2d075aa961af-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-dwkv2\" (UID: \"e0009fee-6991-4819-ae3d-2d075aa961af\") " pod="openshift-authentication/oauth-openshift-558db77b4-dwkv2" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.344358 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/e0009fee-6991-4819-ae3d-2d075aa961af-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-dwkv2\" (UID: 
\"e0009fee-6991-4819-ae3d-2d075aa961af\") " pod="openshift-authentication/oauth-openshift-558db77b4-dwkv2" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.344380 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/e0009fee-6991-4819-ae3d-2d075aa961af-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-dwkv2\" (UID: \"e0009fee-6991-4819-ae3d-2d075aa961af\") " pod="openshift-authentication/oauth-openshift-558db77b4-dwkv2" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.344501 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/e0009fee-6991-4819-ae3d-2d075aa961af-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-dwkv2\" (UID: \"e0009fee-6991-4819-ae3d-2d075aa961af\") " pod="openshift-authentication/oauth-openshift-558db77b4-dwkv2" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.344524 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/e0009fee-6991-4819-ae3d-2d075aa961af-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-dwkv2\" (UID: \"e0009fee-6991-4819-ae3d-2d075aa961af\") " pod="openshift-authentication/oauth-openshift-558db77b4-dwkv2" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.344554 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/e0009fee-6991-4819-ae3d-2d075aa961af-audit-policies\") pod \"oauth-openshift-558db77b4-dwkv2\" (UID: \"e0009fee-6991-4819-ae3d-2d075aa961af\") " pod="openshift-authentication/oauth-openshift-558db77b4-dwkv2" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.344570 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/e0009fee-6991-4819-ae3d-2d075aa961af-audit-dir\") pod \"oauth-openshift-558db77b4-dwkv2\" (UID: \"e0009fee-6991-4819-ae3d-2d075aa961af\") " pod="openshift-authentication/oauth-openshift-558db77b4-dwkv2" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.344691 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/e0009fee-6991-4819-ae3d-2d075aa961af-audit-dir\") pod \"oauth-openshift-558db77b4-dwkv2\" (UID: \"e0009fee-6991-4819-ae3d-2d075aa961af\") " pod="openshift-authentication/oauth-openshift-558db77b4-dwkv2" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.346489 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e0009fee-6991-4819-ae3d-2d075aa961af-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-dwkv2\" (UID: \"e0009fee-6991-4819-ae3d-2d075aa961af\") " pod="openshift-authentication/oauth-openshift-558db77b4-dwkv2" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.348938 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/e0009fee-6991-4819-ae3d-2d075aa961af-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-dwkv2\" (UID: \"e0009fee-6991-4819-ae3d-2d075aa961af\") " pod="openshift-authentication/oauth-openshift-558db77b4-dwkv2" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 
03:12:34.349032 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/e0009fee-6991-4819-ae3d-2d075aa961af-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-dwkv2\" (UID: \"e0009fee-6991-4819-ae3d-2d075aa961af\") " pod="openshift-authentication/oauth-openshift-558db77b4-dwkv2" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.349472 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/e0009fee-6991-4819-ae3d-2d075aa961af-audit-policies\") pod \"oauth-openshift-558db77b4-dwkv2\" (UID: \"e0009fee-6991-4819-ae3d-2d075aa961af\") " pod="openshift-authentication/oauth-openshift-558db77b4-dwkv2" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.350116 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/e0009fee-6991-4819-ae3d-2d075aa961af-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-dwkv2\" (UID: \"e0009fee-6991-4819-ae3d-2d075aa961af\") " pod="openshift-authentication/oauth-openshift-558db77b4-dwkv2" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.350692 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/e0009fee-6991-4819-ae3d-2d075aa961af-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-dwkv2\" (UID: \"e0009fee-6991-4819-ae3d-2d075aa961af\") " pod="openshift-authentication/oauth-openshift-558db77b4-dwkv2" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.351312 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/e0009fee-6991-4819-ae3d-2d075aa961af-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-dwkv2\" (UID: \"e0009fee-6991-4819-ae3d-2d075aa961af\") " pod="openshift-authentication/oauth-openshift-558db77b4-dwkv2" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.352794 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/e0009fee-6991-4819-ae3d-2d075aa961af-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-dwkv2\" (UID: \"e0009fee-6991-4819-ae3d-2d075aa961af\") " pod="openshift-authentication/oauth-openshift-558db77b4-dwkv2" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.353939 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/e0009fee-6991-4819-ae3d-2d075aa961af-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-dwkv2\" (UID: \"e0009fee-6991-4819-ae3d-2d075aa961af\") " pod="openshift-authentication/oauth-openshift-558db77b4-dwkv2" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.355997 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/e0009fee-6991-4819-ae3d-2d075aa961af-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-dwkv2\" (UID: \"e0009fee-6991-4819-ae3d-2d075aa961af\") " pod="openshift-authentication/oauth-openshift-558db77b4-dwkv2" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.356670 5070 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/e0009fee-6991-4819-ae3d-2d075aa961af-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-dwkv2\" (UID: \"e0009fee-6991-4819-ae3d-2d075aa961af\") " pod="openshift-authentication/oauth-openshift-558db77b4-dwkv2" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.358968 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/e0009fee-6991-4819-ae3d-2d075aa961af-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-dwkv2\" (UID: \"e0009fee-6991-4819-ae3d-2d075aa961af\") " pod="openshift-authentication/oauth-openshift-558db77b4-dwkv2" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.361282 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.381215 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.401121 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.407727 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/725fea52-a164-4df9-9bbf-13cda9f52cb6-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-6dxqs\" (UID: \"725fea52-a164-4df9-9bbf-13cda9f52cb6\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-6dxqs" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.420879 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.426286 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/725fea52-a164-4df9-9bbf-13cda9f52cb6-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-6dxqs\" (UID: \"725fea52-a164-4df9-9bbf-13cda9f52cb6\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-6dxqs" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.441173 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.460781 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.481213 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.500871 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.521740 5070 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.541536 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.561519 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.581068 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.600735 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.621979 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.641232 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.661861 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.702469 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.720903 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.751304 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.761333 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.781440 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.801515 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.821626 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.841140 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.860958 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.881237 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.901564 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.921705 5070 
reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.941776 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.960852 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Dec 13 03:12:34 crc kubenswrapper[5070]: I1213 03:12:34.982842 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Dec 13 03:12:35 crc kubenswrapper[5070]: I1213 03:12:35.002207 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Dec 13 03:12:35 crc kubenswrapper[5070]: I1213 03:12:35.021995 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Dec 13 03:12:35 crc kubenswrapper[5070]: I1213 03:12:35.041576 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Dec 13 03:12:35 crc kubenswrapper[5070]: I1213 03:12:35.054524 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 03:12:35 crc kubenswrapper[5070]: I1213 03:12:35.054747 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 03:12:35 crc kubenswrapper[5070]: I1213 03:12:35.054806 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 03:12:35 crc kubenswrapper[5070]: I1213 03:12:35.054856 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 03:12:35 crc kubenswrapper[5070]: I1213 03:12:35.054969 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" 
Dec 13 03:12:35 crc kubenswrapper[5070]: E1213 03:12:35.055117 5070 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 13 03:12:35 crc kubenswrapper[5070]: E1213 03:12:35.055175 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-13 03:12:51.055157238 +0000 UTC m=+63.291000784 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 13 03:12:35 crc kubenswrapper[5070]: E1213 03:12:35.055412 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 03:12:51.055398345 +0000 UTC m=+63.291241901 (durationBeforeRetry 16s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 03:12:35 crc kubenswrapper[5070]: E1213 03:12:35.055547 5070 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 13 03:12:35 crc kubenswrapper[5070]: E1213 03:12:35.055772 5070 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 13 03:12:35 crc kubenswrapper[5070]: E1213 03:12:35.055789 5070 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 13 03:12:35 crc kubenswrapper[5070]: E1213 03:12:35.055819 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-13 03:12:51.055810615 +0000 UTC m=+63.291654161 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 13 03:12:35 crc kubenswrapper[5070]: E1213 03:12:35.055620 5070 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 13 03:12:35 crc kubenswrapper[5070]: E1213 03:12:35.055853 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-13 03:12:51.055847506 +0000 UTC m=+63.291691052 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 13 03:12:35 crc kubenswrapper[5070]: E1213 03:12:35.055662 5070 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 13 03:12:35 crc kubenswrapper[5070]: E1213 03:12:35.055877 5070 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 13 03:12:35 crc kubenswrapper[5070]: E1213 03:12:35.055889 5070 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 13 03:12:35 crc kubenswrapper[5070]: E1213 03:12:35.055915 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-13 03:12:51.055907698 +0000 UTC m=+63.291751244 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 13 03:12:35 crc kubenswrapper[5070]: I1213 03:12:35.059828 5070 request.go:700] Waited for 1.016019741s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-machine-config-operator/secrets?fieldSelector=metadata.name%3Dmcc-proxy-tls&limit=500&resourceVersion=0 Dec 13 03:12:35 crc kubenswrapper[5070]: I1213 03:12:35.061432 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Dec 13 03:12:35 crc kubenswrapper[5070]: I1213 03:12:35.081388 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Dec 13 03:12:35 crc kubenswrapper[5070]: I1213 03:12:35.101776 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Dec 13 03:12:35 crc kubenswrapper[5070]: I1213 03:12:35.121868 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Dec 13 03:12:35 crc kubenswrapper[5070]: I1213 03:12:35.140492 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Dec 13 03:12:35 crc kubenswrapper[5070]: I1213 03:12:35.148666 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/3cd4a611-b694-4ee2-9290-b62ad8854af3-profile-collector-cert\") pod \"olm-operator-6b444d44fb-rx5gc\" (UID: \"3cd4a611-b694-4ee2-9290-b62ad8854af3\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rx5gc" Dec 13 03:12:35 crc kubenswrapper[5070]: I1213 03:12:35.161355 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Dec 13 03:12:35 crc kubenswrapper[5070]: I1213 03:12:35.166037 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 03:12:35 crc kubenswrapper[5070]: I1213 03:12:35.166074 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-tmwbx" Dec 13 03:12:35 crc kubenswrapper[5070]: I1213 03:12:35.166112 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 03:12:35 crc kubenswrapper[5070]: I1213 03:12:35.166043 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 03:12:35 crc kubenswrapper[5070]: I1213 03:12:35.168861 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/3cd4a611-b694-4ee2-9290-b62ad8854af3-srv-cert\") pod \"olm-operator-6b444d44fb-rx5gc\" (UID: \"3cd4a611-b694-4ee2-9290-b62ad8854af3\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rx5gc" Dec 13 03:12:35 crc kubenswrapper[5070]: I1213 03:12:35.181208 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Dec 13 03:12:35 crc kubenswrapper[5070]: I1213 03:12:35.201032 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Dec 13 03:12:35 crc kubenswrapper[5070]: I1213 03:12:35.221057 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Dec 13 03:12:35 crc kubenswrapper[5070]: I1213 03:12:35.240668 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Dec 13 03:12:35 crc kubenswrapper[5070]: E1213 03:12:35.242929 5070 configmap.go:193] Couldn't get configMap openshift-service-ca-operator/service-ca-operator-config: failed to sync configmap cache: timed out waiting for the condition Dec 13 03:12:35 crc kubenswrapper[5070]: E1213 03:12:35.243030 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/6dca72ea-876f-4b17-90ee-62f8287ea7d6-config podName:6dca72ea-876f-4b17-90ee-62f8287ea7d6 nodeName:}" failed. No retries permitted until 2025-12-13 03:12:35.742997817 +0000 UTC m=+47.978841363 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config" (UniqueName: "kubernetes.io/configmap/6dca72ea-876f-4b17-90ee-62f8287ea7d6-config") pod "service-ca-operator-777779d784-dvc5s" (UID: "6dca72ea-876f-4b17-90ee-62f8287ea7d6") : failed to sync configmap cache: timed out waiting for the condition Dec 13 03:12:35 crc kubenswrapper[5070]: E1213 03:12:35.243253 5070 secret.go:188] Couldn't get secret openshift-service-ca-operator/serving-cert: failed to sync secret cache: timed out waiting for the condition Dec 13 03:12:35 crc kubenswrapper[5070]: E1213 03:12:35.243307 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6dca72ea-876f-4b17-90ee-62f8287ea7d6-serving-cert podName:6dca72ea-876f-4b17-90ee-62f8287ea7d6 nodeName:}" failed. No retries permitted until 2025-12-13 03:12:35.743299655 +0000 UTC m=+47.979143201 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "serving-cert" (UniqueName: "kubernetes.io/secret/6dca72ea-876f-4b17-90ee-62f8287ea7d6-serving-cert") pod "service-ca-operator-777779d784-dvc5s" (UID: "6dca72ea-876f-4b17-90ee-62f8287ea7d6") : failed to sync secret cache: timed out waiting for the condition Dec 13 03:12:35 crc kubenswrapper[5070]: I1213 03:12:35.260533 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Dec 13 03:12:35 crc kubenswrapper[5070]: I1213 03:12:35.281510 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Dec 13 03:12:35 crc kubenswrapper[5070]: I1213 03:12:35.301067 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Dec 13 03:12:35 crc kubenswrapper[5070]: I1213 03:12:35.320840 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Dec 13 03:12:35 crc kubenswrapper[5070]: I1213 03:12:35.340584 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Dec 13 03:12:35 crc kubenswrapper[5070]: I1213 03:12:35.368917 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Dec 13 03:12:35 crc kubenswrapper[5070]: I1213 03:12:35.383027 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Dec 13 03:12:35 crc kubenswrapper[5070]: I1213 03:12:35.401068 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Dec 13 03:12:35 crc kubenswrapper[5070]: I1213 03:12:35.421387 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Dec 13 03:12:35 crc kubenswrapper[5070]: I1213 03:12:35.441733 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Dec 13 03:12:35 crc kubenswrapper[5070]: I1213 03:12:35.461245 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Dec 13 03:12:35 crc kubenswrapper[5070]: I1213 03:12:35.482135 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Dec 13 03:12:35 crc kubenswrapper[5070]: I1213 03:12:35.501077 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Dec 13 03:12:35 crc kubenswrapper[5070]: I1213 03:12:35.521669 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Dec 13 03:12:35 crc kubenswrapper[5070]: I1213 03:12:35.530189 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Dec 13 03:12:35 crc kubenswrapper[5070]: I1213 03:12:35.531855 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"3c7166d048c7f6f352babbbbff3d8526971042faedd18a600098c5a1edc72db4"} Dec 13 03:12:35 crc kubenswrapper[5070]: I1213 03:12:35.532302 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 13 03:12:35 crc kubenswrapper[5070]: I1213 03:12:35.541850 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Dec 13 03:12:35 crc kubenswrapper[5070]: I1213 03:12:35.561838 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Dec 13 03:12:35 crc kubenswrapper[5070]: I1213 03:12:35.580426 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Dec 13 03:12:35 crc kubenswrapper[5070]: I1213 03:12:35.601087 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 13 03:12:35 crc kubenswrapper[5070]: I1213 03:12:35.621312 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 13 03:12:35 crc kubenswrapper[5070]: I1213 03:12:35.641315 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Dec 13 03:12:35 crc kubenswrapper[5070]: I1213 03:12:35.661262 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Dec 13 03:12:35 crc kubenswrapper[5070]: I1213 03:12:35.681235 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Dec 13 03:12:35 crc kubenswrapper[5070]: I1213 03:12:35.702049 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Dec 13 03:12:35 crc kubenswrapper[5070]: I1213 03:12:35.722363 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Dec 13 03:12:35 crc kubenswrapper[5070]: I1213 03:12:35.742316 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Dec 13 03:12:35 crc kubenswrapper[5070]: I1213 03:12:35.760757 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-sysctl-allowlist" Dec 13 03:12:35 crc kubenswrapper[5070]: I1213 03:12:35.765897 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6dca72ea-876f-4b17-90ee-62f8287ea7d6-serving-cert\") pod \"service-ca-operator-777779d784-dvc5s\" (UID: \"6dca72ea-876f-4b17-90ee-62f8287ea7d6\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-dvc5s" Dec 13 03:12:35 crc kubenswrapper[5070]: I1213 03:12:35.765969 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6dca72ea-876f-4b17-90ee-62f8287ea7d6-config\") pod \"service-ca-operator-777779d784-dvc5s\" (UID: \"6dca72ea-876f-4b17-90ee-62f8287ea7d6\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-dvc5s" Dec 13 03:12:35 crc kubenswrapper[5070]: I1213 03:12:35.766858 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6dca72ea-876f-4b17-90ee-62f8287ea7d6-config\") pod \"service-ca-operator-777779d784-dvc5s\" (UID: \"6dca72ea-876f-4b17-90ee-62f8287ea7d6\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-dvc5s" Dec 13 03:12:35 crc kubenswrapper[5070]: 
I1213 03:12:35.773908 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6dca72ea-876f-4b17-90ee-62f8287ea7d6-serving-cert\") pod \"service-ca-operator-777779d784-dvc5s\" (UID: \"6dca72ea-876f-4b17-90ee-62f8287ea7d6\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-dvc5s" Dec 13 03:12:35 crc kubenswrapper[5070]: I1213 03:12:35.780965 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Dec 13 03:12:35 crc kubenswrapper[5070]: I1213 03:12:35.801734 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Dec 13 03:12:35 crc kubenswrapper[5070]: I1213 03:12:35.821420 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Dec 13 03:12:35 crc kubenswrapper[5070]: I1213 03:12:35.841639 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Dec 13 03:12:35 crc kubenswrapper[5070]: I1213 03:12:35.862058 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Dec 13 03:12:35 crc kubenswrapper[5070]: I1213 03:12:35.882406 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Dec 13 03:12:35 crc kubenswrapper[5070]: I1213 03:12:35.901935 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Dec 13 03:12:35 crc kubenswrapper[5070]: I1213 03:12:35.942731 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w9wrk\" (UniqueName: \"kubernetes.io/projected/3293f634-1926-4bb8-b639-44d6d14263cb-kube-api-access-w9wrk\") pod \"openshift-config-operator-7777fb866f-jj2l2\" (UID: \"3293f634-1926-4bb8-b639-44d6d14263cb\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-jj2l2" Dec 13 03:12:35 crc kubenswrapper[5070]: I1213 03:12:35.981674 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vmt77\" (UniqueName: \"kubernetes.io/projected/4896d2ca-5911-48fd-a2dd-89d8af9ddf3f-kube-api-access-vmt77\") pod \"openshift-apiserver-operator-796bbdcf4f-4z9vz\" (UID: \"4896d2ca-5911-48fd-a2dd-89d8af9ddf3f\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-4z9vz" Dec 13 03:12:35 crc kubenswrapper[5070]: I1213 03:12:35.999603 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/47eff15d-d8a1-4f98-8579-eb8cdec723ff-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-5clz7\" (UID: \"47eff15d-d8a1-4f98-8579-eb8cdec723ff\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-5clz7" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.022736 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mggph\" (UniqueName: \"kubernetes.io/projected/93fc381a-a5cc-4d02-bd2a-ba2898536d45-kube-api-access-mggph\") pod \"downloads-7954f5f757-bjz8x\" (UID: \"93fc381a-a5cc-4d02-bd2a-ba2898536d45\") " pod="openshift-console/downloads-7954f5f757-bjz8x" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.036902 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-jb2dc\" (UniqueName: \"kubernetes.io/projected/bf452261-a66d-45bd-9155-55aa347f086a-kube-api-access-jb2dc\") pod \"router-default-5444994796-z8556\" (UID: \"bf452261-a66d-45bd-9155-55aa347f086a\") " pod="openshift-ingress/router-default-5444994796-z8556" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.057860 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mpb6w\" (UniqueName: \"kubernetes.io/projected/e96df34c-cff8-4655-9f8c-2f0baf4f772c-kube-api-access-mpb6w\") pod \"cluster-samples-operator-665b6dd947-6z6fd\" (UID: \"e96df34c-cff8-4655-9f8c-2f0baf4f772c\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-6z6fd" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.079930 5070 request.go:700] Waited for 1.965579957s due to client-side throttling, not priority and fairness, request: POST:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-storage-version-migrator/serviceaccounts/kube-storage-version-migrator-sa/token Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.085289 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-6z6fd" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.085778 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kv9vk\" (UniqueName: \"kubernetes.io/projected/9829d6ca-fc22-4e71-a966-cff569f273fb-kube-api-access-kv9vk\") pod \"machine-api-operator-5694c8668f-z445m\" (UID: \"9829d6ca-fc22-4e71-a966-cff569f273fb\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-z445m" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.104140 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r9lmc\" (UniqueName: \"kubernetes.io/projected/9dffbe71-422b-480e-8bf0-e6b89f6daa88-kube-api-access-r9lmc\") pod \"migrator-59844c95c7-lrx9b\" (UID: \"9dffbe71-422b-480e-8bf0-e6b89f6daa88\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-lrx9b" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.120349 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-z445m" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.130019 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-jj2l2" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.152743 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p2t62\" (UniqueName: \"kubernetes.io/projected/56416152-93a7-49cb-a1f0-01577eb6cadc-kube-api-access-p2t62\") pod \"apiserver-7bbb656c7d-s276q\" (UID: \"56416152-93a7-49cb-a1f0-01577eb6cadc\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-s276q" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.153879 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c9dbq\" (UniqueName: \"kubernetes.io/projected/7dd4f70a-3e48-4dcd-8e65-a9ee2430dfc4-kube-api-access-c9dbq\") pod \"console-f9d7485db-z6jvd\" (UID: \"7dd4f70a-3e48-4dcd-8e65-a9ee2430dfc4\") " pod="openshift-console/console-f9d7485db-z6jvd" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.165925 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-lrx9b" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.166251 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wx7n6\" (UniqueName: \"kubernetes.io/projected/e26ec275-42a6-42af-a6ed-0872bd777acf-kube-api-access-wx7n6\") pod \"controller-manager-879f6c89f-qk5h4\" (UID: \"e26ec275-42a6-42af-a6ed-0872bd777acf\") " pod="openshift-controller-manager/controller-manager-879f6c89f-qk5h4" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.181279 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-z8556" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.196148 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mlfs4\" (UniqueName: \"kubernetes.io/projected/47eff15d-d8a1-4f98-8579-eb8cdec723ff-kube-api-access-mlfs4\") pod \"cluster-image-registry-operator-dc59b4c8b-5clz7\" (UID: \"47eff15d-d8a1-4f98-8579-eb8cdec723ff\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-5clz7" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.204230 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-bjz8x" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.206168 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1a78b35d-1521-4a5a-9cb0-c73064d59f12-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-fmcfp\" (UID: \"1a78b35d-1521-4a5a-9cb0-c73064d59f12\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-fmcfp" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.222175 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.226655 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4t8rj\" (UniqueName: \"kubernetes.io/projected/9ad1cf0d-be40-42ac-adf2-4fd3b3d40d6b-kube-api-access-4t8rj\") pod \"console-operator-58897d9998-g4gp9\" (UID: \"9ad1cf0d-be40-42ac-adf2-4fd3b3d40d6b\") " pod="openshift-console-operator/console-operator-58897d9998-g4gp9" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.244164 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.261620 5070 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.265946 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-4z9vz" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.310505 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zkjbr\" (UniqueName: \"kubernetes.io/projected/6dca72ea-876f-4b17-90ee-62f8287ea7d6-kube-api-access-zkjbr\") pod \"service-ca-operator-777779d784-dvc5s\" (UID: \"6dca72ea-876f-4b17-90ee-62f8287ea7d6\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-dvc5s" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.322844 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-z6jvd" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.325738 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nvcfp\" (UniqueName: \"kubernetes.io/projected/e0009fee-6991-4819-ae3d-2d075aa961af-kube-api-access-nvcfp\") pod \"oauth-openshift-558db77b4-dwkv2\" (UID: \"e0009fee-6991-4819-ae3d-2d075aa961af\") " pod="openshift-authentication/oauth-openshift-558db77b4-dwkv2" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.331365 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-6z6fd"] Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.334690 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-s276q" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.343310 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m65zn\" (UniqueName: \"kubernetes.io/projected/725fea52-a164-4df9-9bbf-13cda9f52cb6-kube-api-access-m65zn\") pod \"kube-storage-version-migrator-operator-b67b599dd-6dxqs\" (UID: \"725fea52-a164-4df9-9bbf-13cda9f52cb6\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-6dxqs" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.344329 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-g4gp9" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.356215 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-qk5h4" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.359716 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2d4s6\" (UniqueName: \"kubernetes.io/projected/3cd4a611-b694-4ee2-9290-b62ad8854af3-kube-api-access-2d4s6\") pod \"olm-operator-6b444d44fb-rx5gc\" (UID: \"3cd4a611-b694-4ee2-9290-b62ad8854af3\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rx5gc" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.374384 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-6dxqs" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.383128 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.403188 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.421911 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.432031 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-z445m"] Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.441533 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.449622 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-fmcfp" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.461605 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.468253 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-5clz7" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.472202 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-lrx9b"] Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.489749 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.538118 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-z8556" event={"ID":"bf452261-a66d-45bd-9155-55aa347f086a","Type":"ContainerStarted","Data":"129dc8a2c3632452bda30550739a0d9b2b8ae953c1085df842b05ff8dca5a5a7"} Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.543957 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-z445m" event={"ID":"9829d6ca-fc22-4e71-a966-cff569f273fb","Type":"ContainerStarted","Data":"91417cd4c5a7c34f0b21246029b245fb1f59e125ec0daddf7a85eaf0257f562c"} Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.554268 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-bjz8x"] Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.564047 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-4z9vz"] Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.597888 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-jj2l2"] Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.656975 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-g4gp9"] Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.697244 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rx5gc" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.697534 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-dvc5s" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.697879 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-dwkv2" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.699255 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xkp8n\" (UID: \"472601ba-cee2-4f6e-ac53-a5606ef0469f\") " pod="openshift-image-registry/image-registry-697d97f7c8-xkp8n" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.699312 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9lbn2\" (UniqueName: \"kubernetes.io/projected/11d847a0-fe18-4f4a-8d65-b0f64b643c68-kube-api-access-9lbn2\") pod \"collect-profiles-29426580-zzsjs\" (UID: \"11d847a0-fe18-4f4a-8d65-b0f64b643c68\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426580-zzsjs" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.699350 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/472601ba-cee2-4f6e-ac53-a5606ef0469f-registry-tls\") pod \"image-registry-697d97f7c8-xkp8n\" (UID: \"472601ba-cee2-4f6e-ac53-a5606ef0469f\") " pod="openshift-image-registry/image-registry-697d97f7c8-xkp8n" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.699370 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/472601ba-cee2-4f6e-ac53-a5606ef0469f-bound-sa-token\") pod \"image-registry-697d97f7c8-xkp8n\" (UID: \"472601ba-cee2-4f6e-ac53-a5606ef0469f\") " pod="openshift-image-registry/image-registry-697d97f7c8-xkp8n" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.699410 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/472601ba-cee2-4f6e-ac53-a5606ef0469f-installation-pull-secrets\") pod \"image-registry-697d97f7c8-xkp8n\" (UID: \"472601ba-cee2-4f6e-ac53-a5606ef0469f\") " pod="openshift-image-registry/image-registry-697d97f7c8-xkp8n" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.699459 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/df8e88fc-38e8-4748-aed5-7fb71bfdb0df-proxy-tls\") pod \"machine-config-operator-74547568cd-n4gjw\" (UID: \"df8e88fc-38e8-4748-aed5-7fb71bfdb0df\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-n4gjw" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.699484 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/68d97f24-c17b-4120-be6e-48655b5efe88-etcd-service-ca\") pod \"etcd-operator-b45778765-rlmj4\" (UID: \"68d97f24-c17b-4120-be6e-48655b5efe88\") " pod="openshift-etcd-operator/etcd-operator-b45778765-rlmj4" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.699524 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/df8e88fc-38e8-4748-aed5-7fb71bfdb0df-auth-proxy-config\") pod \"machine-config-operator-74547568cd-n4gjw\" (UID: \"df8e88fc-38e8-4748-aed5-7fb71bfdb0df\") " 
pod="openshift-machine-config-operator/machine-config-operator-74547568cd-n4gjw" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.699558 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/472601ba-cee2-4f6e-ac53-a5606ef0469f-registry-certificates\") pod \"image-registry-697d97f7c8-xkp8n\" (UID: \"472601ba-cee2-4f6e-ac53-a5606ef0469f\") " pod="openshift-image-registry/image-registry-697d97f7c8-xkp8n" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.699584 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/df8e88fc-38e8-4748-aed5-7fb71bfdb0df-images\") pod \"machine-config-operator-74547568cd-n4gjw\" (UID: \"df8e88fc-38e8-4748-aed5-7fb71bfdb0df\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-n4gjw" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.699608 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q7tpn\" (UniqueName: \"kubernetes.io/projected/472601ba-cee2-4f6e-ac53-a5606ef0469f-kube-api-access-q7tpn\") pod \"image-registry-697d97f7c8-xkp8n\" (UID: \"472601ba-cee2-4f6e-ac53-a5606ef0469f\") " pod="openshift-image-registry/image-registry-697d97f7c8-xkp8n" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.699627 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rjzz6\" (UniqueName: \"kubernetes.io/projected/68d97f24-c17b-4120-be6e-48655b5efe88-kube-api-access-rjzz6\") pod \"etcd-operator-b45778765-rlmj4\" (UID: \"68d97f24-c17b-4120-be6e-48655b5efe88\") " pod="openshift-etcd-operator/etcd-operator-b45778765-rlmj4" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.699647 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/11d847a0-fe18-4f4a-8d65-b0f64b643c68-config-volume\") pod \"collect-profiles-29426580-zzsjs\" (UID: \"11d847a0-fe18-4f4a-8d65-b0f64b643c68\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426580-zzsjs" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.699669 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/68d97f24-c17b-4120-be6e-48655b5efe88-serving-cert\") pod \"etcd-operator-b45778765-rlmj4\" (UID: \"68d97f24-c17b-4120-be6e-48655b5efe88\") " pod="openshift-etcd-operator/etcd-operator-b45778765-rlmj4" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.699694 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/68d97f24-c17b-4120-be6e-48655b5efe88-etcd-client\") pod \"etcd-operator-b45778765-rlmj4\" (UID: \"68d97f24-c17b-4120-be6e-48655b5efe88\") " pod="openshift-etcd-operator/etcd-operator-b45778765-rlmj4" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.699727 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/68d97f24-c17b-4120-be6e-48655b5efe88-etcd-ca\") pod \"etcd-operator-b45778765-rlmj4\" (UID: \"68d97f24-c17b-4120-be6e-48655b5efe88\") " pod="openshift-etcd-operator/etcd-operator-b45778765-rlmj4" Dec 13 03:12:36 crc 
kubenswrapper[5070]: I1213 03:12:36.699752 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lwll8\" (UniqueName: \"kubernetes.io/projected/df8e88fc-38e8-4748-aed5-7fb71bfdb0df-kube-api-access-lwll8\") pod \"machine-config-operator-74547568cd-n4gjw\" (UID: \"df8e88fc-38e8-4748-aed5-7fb71bfdb0df\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-n4gjw" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.699772 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/68d97f24-c17b-4120-be6e-48655b5efe88-config\") pod \"etcd-operator-b45778765-rlmj4\" (UID: \"68d97f24-c17b-4120-be6e-48655b5efe88\") " pod="openshift-etcd-operator/etcd-operator-b45778765-rlmj4" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.699797 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/11d847a0-fe18-4f4a-8d65-b0f64b643c68-secret-volume\") pod \"collect-profiles-29426580-zzsjs\" (UID: \"11d847a0-fe18-4f4a-8d65-b0f64b643c68\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426580-zzsjs" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.699822 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/472601ba-cee2-4f6e-ac53-a5606ef0469f-ca-trust-extracted\") pod \"image-registry-697d97f7c8-xkp8n\" (UID: \"472601ba-cee2-4f6e-ac53-a5606ef0469f\") " pod="openshift-image-registry/image-registry-697d97f7c8-xkp8n" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.699844 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/472601ba-cee2-4f6e-ac53-a5606ef0469f-trusted-ca\") pod \"image-registry-697d97f7c8-xkp8n\" (UID: \"472601ba-cee2-4f6e-ac53-a5606ef0469f\") " pod="openshift-image-registry/image-registry-697d97f7c8-xkp8n" Dec 13 03:12:36 crc kubenswrapper[5070]: E1213 03:12:36.700202 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 03:12:37.200188483 +0000 UTC m=+49.436032029 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xkp8n" (UID: "472601ba-cee2-4f6e-ac53-a5606ef0469f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 03:12:36 crc kubenswrapper[5070]: W1213 03:12:36.714139 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod93fc381a_a5cc_4d02_bd2a_ba2898536d45.slice/crio-6b9c36b6ce0ec0bcac02fb353b800766c57fb9913f94aba3e935adaf8c7f2b65 WatchSource:0}: Error finding container 6b9c36b6ce0ec0bcac02fb353b800766c57fb9913f94aba3e935adaf8c7f2b65: Status 404 returned error can't find the container with id 6b9c36b6ce0ec0bcac02fb353b800766c57fb9913f94aba3e935adaf8c7f2b65 Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.739908 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-s276q"] Dec 13 03:12:36 crc kubenswrapper[5070]: W1213 03:12:36.789422 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9dffbe71_422b_480e_8bf0_e6b89f6daa88.slice/crio-587a0b9440f183646477f81a1cbf8347958b1ff2266e01ec65c609b42dbbe3e0 WatchSource:0}: Error finding container 587a0b9440f183646477f81a1cbf8347958b1ff2266e01ec65c609b42dbbe3e0: Status 404 returned error can't find the container with id 587a0b9440f183646477f81a1cbf8347958b1ff2266e01ec65c609b42dbbe3e0 Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.803559 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 03:12:36 crc kubenswrapper[5070]: E1213 03:12:36.803685 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 03:12:37.303642084 +0000 UTC m=+49.539485630 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.803966 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/088b1a9f-bf10-4751-875e-092b9c149cfa-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-h8tb8\" (UID: \"088b1a9f-bf10-4751-875e-092b9c149cfa\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-h8tb8" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.803990 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/79783d23-8a02-4af8-bc4b-8e8f74dae08e-profile-collector-cert\") pod \"catalog-operator-68c6474976-kpb6p\" (UID: \"79783d23-8a02-4af8-bc4b-8e8f74dae08e\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-kpb6p" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.804004 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/b4996b72-90e0-46a0-a0e9-fed852729a89-signing-key\") pod \"service-ca-9c57cc56f-l9t5h\" (UID: \"b4996b72-90e0-46a0-a0e9-fed852729a89\") " pod="openshift-service-ca/service-ca-9c57cc56f-l9t5h" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.804020 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l2ms9\" (UniqueName: \"kubernetes.io/projected/331fdd7d-1eb1-4a86-ace3-75fe6d2a50e0-kube-api-access-l2ms9\") pod \"ingress-operator-5b745b69d9-vx28k\" (UID: \"331fdd7d-1eb1-4a86-ace3-75fe6d2a50e0\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-vx28k" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.804039 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pwnzg\" (UniqueName: \"kubernetes.io/projected/79783d23-8a02-4af8-bc4b-8e8f74dae08e-kube-api-access-pwnzg\") pod \"catalog-operator-68c6474976-kpb6p\" (UID: \"79783d23-8a02-4af8-bc4b-8e8f74dae08e\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-kpb6p" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.804057 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/5dbe31ab-6e43-41c3-8f14-e1ac87938038-proxy-tls\") pod \"machine-config-controller-84d6567774-6td6p\" (UID: \"5dbe31ab-6e43-41c3-8f14-e1ac87938038\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-6td6p" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.804086 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/df8e88fc-38e8-4748-aed5-7fb71bfdb0df-proxy-tls\") pod \"machine-config-operator-74547568cd-n4gjw\" (UID: \"df8e88fc-38e8-4748-aed5-7fb71bfdb0df\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-n4gjw" Dec 13 03:12:36 crc 
kubenswrapper[5070]: I1213 03:12:36.804103 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/d0e149ba-a883-49ad-a21f-40b99873662b-mountpoint-dir\") pod \"csi-hostpathplugin-6fssx\" (UID: \"d0e149ba-a883-49ad-a21f-40b99873662b\") " pod="hostpath-provisioner/csi-hostpathplugin-6fssx" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.804139 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/68d97f24-c17b-4120-be6e-48655b5efe88-etcd-service-ca\") pod \"etcd-operator-b45778765-rlmj4\" (UID: \"68d97f24-c17b-4120-be6e-48655b5efe88\") " pod="openshift-etcd-operator/etcd-operator-b45778765-rlmj4" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.804187 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wbjzr\" (UniqueName: \"kubernetes.io/projected/44e54907-8f0d-4c3c-960c-e1e5bcad7523-kube-api-access-wbjzr\") pod \"machine-approver-56656f9798-5wtk8\" (UID: \"44e54907-8f0d-4c3c-960c-e1e5bcad7523\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-5wtk8" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.804204 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/e55788c5-581c-4013-8c44-83a0b5f74b0a-encryption-config\") pod \"apiserver-76f77b778f-7hxcj\" (UID: \"e55788c5-581c-4013-8c44-83a0b5f74b0a\") " pod="openshift-apiserver/apiserver-76f77b778f-7hxcj" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.804223 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/849cc5eb-fe81-4755-a13e-56cdc7b4f248-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-fhh5m\" (UID: \"849cc5eb-fe81-4755-a13e-56cdc7b4f248\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-fhh5m" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.804239 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5b047179-d09b-486d-8e47-2c110a710e51-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-m7blq\" (UID: \"5b047179-d09b-486d-8e47-2c110a710e51\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-m7blq" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.804267 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/df8e88fc-38e8-4748-aed5-7fb71bfdb0df-auth-proxy-config\") pod \"machine-config-operator-74547568cd-n4gjw\" (UID: \"df8e88fc-38e8-4748-aed5-7fb71bfdb0df\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-n4gjw" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.804295 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mx678\" (UniqueName: \"kubernetes.io/projected/8cfebcb5-7935-4f04-8d87-40dce0dc5ef7-kube-api-access-mx678\") pod \"machine-config-server-hrb5f\" (UID: \"8cfebcb5-7935-4f04-8d87-40dce0dc5ef7\") " pod="openshift-machine-config-operator/machine-config-server-hrb5f" Dec 13 03:12:36 crc 
kubenswrapper[5070]: I1213 03:12:36.804312 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e55788c5-581c-4013-8c44-83a0b5f74b0a-trusted-ca-bundle\") pod \"apiserver-76f77b778f-7hxcj\" (UID: \"e55788c5-581c-4013-8c44-83a0b5f74b0a\") " pod="openshift-apiserver/apiserver-76f77b778f-7hxcj" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.804328 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4q7lj\" (UniqueName: \"kubernetes.io/projected/994638f3-a23b-445a-b2d9-929361c0a5e3-kube-api-access-4q7lj\") pod \"dns-operator-744455d44c-2sbvn\" (UID: \"994638f3-a23b-445a-b2d9-929361c0a5e3\") " pod="openshift-dns-operator/dns-operator-744455d44c-2sbvn" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.804343 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/d0e149ba-a883-49ad-a21f-40b99873662b-plugins-dir\") pod \"csi-hostpathplugin-6fssx\" (UID: \"d0e149ba-a883-49ad-a21f-40b99873662b\") " pod="hostpath-provisioner/csi-hostpathplugin-6fssx" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.804369 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/472601ba-cee2-4f6e-ac53-a5606ef0469f-registry-certificates\") pod \"image-registry-697d97f7c8-xkp8n\" (UID: \"472601ba-cee2-4f6e-ac53-a5606ef0469f\") " pod="openshift-image-registry/image-registry-697d97f7c8-xkp8n" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.804387 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/44e54907-8f0d-4c3c-960c-e1e5bcad7523-auth-proxy-config\") pod \"machine-approver-56656f9798-5wtk8\" (UID: \"44e54907-8f0d-4c3c-960c-e1e5bcad7523\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-5wtk8" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.804406 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a508f977-bce2-4269-ae18-1c655e9befd8-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-ckvrb\" (UID: \"a508f977-bce2-4269-ae18-1c655e9befd8\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-ckvrb" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.804434 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a54536a5-e3a5-40e5-8864-f799bdfff6ca-metrics-tls\") pod \"dns-default-n85jm\" (UID: \"a54536a5-e3a5-40e5-8864-f799bdfff6ca\") " pod="openshift-dns/dns-default-n85jm" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.804465 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rjzz6\" (UniqueName: \"kubernetes.io/projected/68d97f24-c17b-4120-be6e-48655b5efe88-kube-api-access-rjzz6\") pod \"etcd-operator-b45778765-rlmj4\" (UID: \"68d97f24-c17b-4120-be6e-48655b5efe88\") " pod="openshift-etcd-operator/etcd-operator-b45778765-rlmj4" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.804482 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/e55788c5-581c-4013-8c44-83a0b5f74b0a-image-import-ca\") pod \"apiserver-76f77b778f-7hxcj\" (UID: \"e55788c5-581c-4013-8c44-83a0b5f74b0a\") " pod="openshift-apiserver/apiserver-76f77b778f-7hxcj" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.804501 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5b047179-d09b-486d-8e47-2c110a710e51-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-m7blq\" (UID: \"5b047179-d09b-486d-8e47-2c110a710e51\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-m7blq" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.804517 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1ba4ef14-c563-4903-b359-f80b487d8ced-serving-cert\") pod \"route-controller-manager-6576b87f9c-27s4z\" (UID: \"1ba4ef14-c563-4903-b359-f80b487d8ced\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-27s4z" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.804544 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/68d97f24-c17b-4120-be6e-48655b5efe88-serving-cert\") pod \"etcd-operator-b45778765-rlmj4\" (UID: \"68d97f24-c17b-4120-be6e-48655b5efe88\") " pod="openshift-etcd-operator/etcd-operator-b45778765-rlmj4" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.804561 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/11d847a0-fe18-4f4a-8d65-b0f64b643c68-config-volume\") pod \"collect-profiles-29426580-zzsjs\" (UID: \"11d847a0-fe18-4f4a-8d65-b0f64b643c68\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426580-zzsjs" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.804579 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bjdmv\" (UniqueName: \"kubernetes.io/projected/04b427fa-011a-4d5c-8844-48bb0c3f319e-kube-api-access-bjdmv\") pod \"package-server-manager-789f6589d5-grwvc\" (UID: \"04b427fa-011a-4d5c-8844-48bb0c3f319e\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-grwvc" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.804595 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x4vkp\" (UniqueName: \"kubernetes.io/projected/74e8be9f-8aac-4eaf-9d70-b5ffab6f9b54-kube-api-access-x4vkp\") pod \"ingress-canary-jgtrr\" (UID: \"74e8be9f-8aac-4eaf-9d70-b5ffab6f9b54\") " pod="openshift-ingress-canary/ingress-canary-jgtrr" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.804610 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1ba4ef14-c563-4903-b359-f80b487d8ced-client-ca\") pod \"route-controller-manager-6576b87f9c-27s4z\" (UID: \"1ba4ef14-c563-4903-b359-f80b487d8ced\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-27s4z" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.804653 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: 
\"kubernetes.io/secret/68d97f24-c17b-4120-be6e-48655b5efe88-etcd-client\") pod \"etcd-operator-b45778765-rlmj4\" (UID: \"68d97f24-c17b-4120-be6e-48655b5efe88\") " pod="openshift-etcd-operator/etcd-operator-b45778765-rlmj4" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.804689 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/68d97f24-c17b-4120-be6e-48655b5efe88-etcd-ca\") pod \"etcd-operator-b45778765-rlmj4\" (UID: \"68d97f24-c17b-4120-be6e-48655b5efe88\") " pod="openshift-etcd-operator/etcd-operator-b45778765-rlmj4" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.804706 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zgzzs\" (UniqueName: \"kubernetes.io/projected/a54536a5-e3a5-40e5-8864-f799bdfff6ca-kube-api-access-zgzzs\") pod \"dns-default-n85jm\" (UID: \"a54536a5-e3a5-40e5-8864-f799bdfff6ca\") " pod="openshift-dns/dns-default-n85jm" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.804720 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/b4996b72-90e0-46a0-a0e9-fed852729a89-signing-cabundle\") pod \"service-ca-9c57cc56f-l9t5h\" (UID: \"b4996b72-90e0-46a0-a0e9-fed852729a89\") " pod="openshift-service-ca/service-ca-9c57cc56f-l9t5h" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.804745 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lwll8\" (UniqueName: \"kubernetes.io/projected/df8e88fc-38e8-4748-aed5-7fb71bfdb0df-kube-api-access-lwll8\") pod \"machine-config-operator-74547568cd-n4gjw\" (UID: \"df8e88fc-38e8-4748-aed5-7fb71bfdb0df\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-n4gjw" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.804759 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/68d97f24-c17b-4120-be6e-48655b5efe88-config\") pod \"etcd-operator-b45778765-rlmj4\" (UID: \"68d97f24-c17b-4120-be6e-48655b5efe88\") " pod="openshift-etcd-operator/etcd-operator-b45778765-rlmj4" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.804774 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/e55788c5-581c-4013-8c44-83a0b5f74b0a-node-pullsecrets\") pod \"apiserver-76f77b778f-7hxcj\" (UID: \"e55788c5-581c-4013-8c44-83a0b5f74b0a\") " pod="openshift-apiserver/apiserver-76f77b778f-7hxcj" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.804802 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/bbdcfa81-b48d-4067-af2e-0de54cea8c7e-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-k6vpl\" (UID: \"bbdcfa81-b48d-4067-af2e-0de54cea8c7e\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-k6vpl" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.804827 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/11d847a0-fe18-4f4a-8d65-b0f64b643c68-secret-volume\") pod \"collect-profiles-29426580-zzsjs\" (UID: \"11d847a0-fe18-4f4a-8d65-b0f64b643c68\") " 
pod="openshift-operator-lifecycle-manager/collect-profiles-29426580-zzsjs" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.804850 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/8cfebcb5-7935-4f04-8d87-40dce0dc5ef7-certs\") pod \"machine-config-server-hrb5f\" (UID: \"8cfebcb5-7935-4f04-8d87-40dce0dc5ef7\") " pod="openshift-machine-config-operator/machine-config-server-hrb5f" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.804885 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/d84fad97-769a-4f5d-8e19-d91d308675f6-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-vblq5\" (UID: \"d84fad97-769a-4f5d-8e19-d91d308675f6\") " pod="openshift-marketplace/marketplace-operator-79b997595-vblq5" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.804910 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ready\" (UniqueName: \"kubernetes.io/empty-dir/3dd269a0-64a2-4e74-91e2-7edbf5fd5574-ready\") pod \"cni-sysctl-allowlist-ds-9chqz\" (UID: \"3dd269a0-64a2-4e74-91e2-7edbf5fd5574\") " pod="openshift-multus/cni-sysctl-allowlist-ds-9chqz" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.804925 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a54536a5-e3a5-40e5-8864-f799bdfff6ca-config-volume\") pod \"dns-default-n85jm\" (UID: \"a54536a5-e3a5-40e5-8864-f799bdfff6ca\") " pod="openshift-dns/dns-default-n85jm" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.804948 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/79783d23-8a02-4af8-bc4b-8e8f74dae08e-srv-cert\") pod \"catalog-operator-68c6474976-kpb6p\" (UID: \"79783d23-8a02-4af8-bc4b-8e8f74dae08e\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-kpb6p" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.804964 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bbdcfa81-b48d-4067-af2e-0de54cea8c7e-config\") pod \"authentication-operator-69f744f599-k6vpl\" (UID: \"bbdcfa81-b48d-4067-af2e-0de54cea8c7e\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-k6vpl" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.804991 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5xfr5\" (UniqueName: \"kubernetes.io/projected/3dd269a0-64a2-4e74-91e2-7edbf5fd5574-kube-api-access-5xfr5\") pod \"cni-sysctl-allowlist-ds-9chqz\" (UID: \"3dd269a0-64a2-4e74-91e2-7edbf5fd5574\") " pod="openshift-multus/cni-sysctl-allowlist-ds-9chqz" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.805018 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1ba4ef14-c563-4903-b359-f80b487d8ced-config\") pod \"route-controller-manager-6576b87f9c-27s4z\" (UID: \"1ba4ef14-c563-4903-b359-f80b487d8ced\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-27s4z" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.805033 5070 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-htt95\" (UniqueName: \"kubernetes.io/projected/b4996b72-90e0-46a0-a0e9-fed852729a89-kube-api-access-htt95\") pod \"service-ca-9c57cc56f-l9t5h\" (UID: \"b4996b72-90e0-46a0-a0e9-fed852729a89\") " pod="openshift-service-ca/service-ca-9c57cc56f-l9t5h" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.805048 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/bbdcfa81-b48d-4067-af2e-0de54cea8c7e-service-ca-bundle\") pod \"authentication-operator-69f744f599-k6vpl\" (UID: \"bbdcfa81-b48d-4067-af2e-0de54cea8c7e\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-k6vpl" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.805075 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/472601ba-cee2-4f6e-ac53-a5606ef0469f-installation-pull-secrets\") pod \"image-registry-697d97f7c8-xkp8n\" (UID: \"472601ba-cee2-4f6e-ac53-a5606ef0469f\") " pod="openshift-image-registry/image-registry-697d97f7c8-xkp8n" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.805091 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d84fad97-769a-4f5d-8e19-d91d308675f6-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-vblq5\" (UID: \"d84fad97-769a-4f5d-8e19-d91d308675f6\") " pod="openshift-marketplace/marketplace-operator-79b997595-vblq5" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.805105 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e55788c5-581c-4013-8c44-83a0b5f74b0a-config\") pod \"apiserver-76f77b778f-7hxcj\" (UID: \"e55788c5-581c-4013-8c44-83a0b5f74b0a\") " pod="openshift-apiserver/apiserver-76f77b778f-7hxcj" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.805122 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-glgjt\" (UniqueName: \"kubernetes.io/projected/bbdcfa81-b48d-4067-af2e-0de54cea8c7e-kube-api-access-glgjt\") pod \"authentication-operator-69f744f599-k6vpl\" (UID: \"bbdcfa81-b48d-4067-af2e-0de54cea8c7e\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-k6vpl" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.805137 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/994638f3-a23b-445a-b2d9-929361c0a5e3-metrics-tls\") pod \"dns-operator-744455d44c-2sbvn\" (UID: \"994638f3-a23b-445a-b2d9-929361c0a5e3\") " pod="openshift-dns-operator/dns-operator-744455d44c-2sbvn" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.805152 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/e55788c5-581c-4013-8c44-83a0b5f74b0a-etcd-serving-ca\") pod \"apiserver-76f77b778f-7hxcj\" (UID: \"e55788c5-581c-4013-8c44-83a0b5f74b0a\") " pod="openshift-apiserver/apiserver-76f77b778f-7hxcj" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.805176 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started 
for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/8cfebcb5-7935-4f04-8d87-40dce0dc5ef7-node-bootstrap-token\") pod \"machine-config-server-hrb5f\" (UID: \"8cfebcb5-7935-4f04-8d87-40dce0dc5ef7\") " pod="openshift-machine-config-operator/machine-config-server-hrb5f" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.805190 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/e55788c5-581c-4013-8c44-83a0b5f74b0a-etcd-client\") pod \"apiserver-76f77b778f-7hxcj\" (UID: \"e55788c5-581c-4013-8c44-83a0b5f74b0a\") " pod="openshift-apiserver/apiserver-76f77b778f-7hxcj" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.805206 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8t4d7\" (UniqueName: \"kubernetes.io/projected/d0e149ba-a883-49ad-a21f-40b99873662b-kube-api-access-8t4d7\") pod \"csi-hostpathplugin-6fssx\" (UID: \"d0e149ba-a883-49ad-a21f-40b99873662b\") " pod="hostpath-provisioner/csi-hostpathplugin-6fssx" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.805233 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-68bpt\" (UniqueName: \"kubernetes.io/projected/088b1a9f-bf10-4751-875e-092b9c149cfa-kube-api-access-68bpt\") pod \"multus-admission-controller-857f4d67dd-h8tb8\" (UID: \"088b1a9f-bf10-4751-875e-092b9c149cfa\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-h8tb8" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.805247 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k79b6\" (UniqueName: \"kubernetes.io/projected/d84fad97-769a-4f5d-8e19-d91d308675f6-kube-api-access-k79b6\") pod \"marketplace-operator-79b997595-vblq5\" (UID: \"d84fad97-769a-4f5d-8e19-d91d308675f6\") " pod="openshift-marketplace/marketplace-operator-79b997595-vblq5" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.805472 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6m6ct\" (UniqueName: \"kubernetes.io/projected/e55788c5-581c-4013-8c44-83a0b5f74b0a-kube-api-access-6m6ct\") pod \"apiserver-76f77b778f-7hxcj\" (UID: \"e55788c5-581c-4013-8c44-83a0b5f74b0a\") " pod="openshift-apiserver/apiserver-76f77b778f-7hxcj" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.807632 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/472601ba-cee2-4f6e-ac53-a5606ef0469f-registry-certificates\") pod \"image-registry-697d97f7c8-xkp8n\" (UID: \"472601ba-cee2-4f6e-ac53-a5606ef0469f\") " pod="openshift-image-registry/image-registry-697d97f7c8-xkp8n" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.808022 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/df8e88fc-38e8-4748-aed5-7fb71bfdb0df-auth-proxy-config\") pod \"machine-config-operator-74547568cd-n4gjw\" (UID: \"df8e88fc-38e8-4748-aed5-7fb71bfdb0df\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-n4gjw" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.808092 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: 
\"kubernetes.io/secret/44e54907-8f0d-4c3c-960c-e1e5bcad7523-machine-approver-tls\") pod \"machine-approver-56656f9798-5wtk8\" (UID: \"44e54907-8f0d-4c3c-960c-e1e5bcad7523\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-5wtk8" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.808117 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/e55788c5-581c-4013-8c44-83a0b5f74b0a-audit-dir\") pod \"apiserver-76f77b778f-7hxcj\" (UID: \"e55788c5-581c-4013-8c44-83a0b5f74b0a\") " pod="openshift-apiserver/apiserver-76f77b778f-7hxcj" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.808146 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-62gjl\" (UniqueName: \"kubernetes.io/projected/7eb9db2d-0c78-4df0-ad39-1b3d6bc75d82-kube-api-access-62gjl\") pod \"packageserver-d55dfcdfc-wm2b8\" (UID: \"7eb9db2d-0c78-4df0-ad39-1b3d6bc75d82\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-wm2b8" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.808168 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ltgfq\" (UniqueName: \"kubernetes.io/projected/5dbe31ab-6e43-41c3-8f14-e1ac87938038-kube-api-access-ltgfq\") pod \"machine-config-controller-84d6567774-6td6p\" (UID: \"5dbe31ab-6e43-41c3-8f14-e1ac87938038\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-6td6p" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.808860 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/68d97f24-c17b-4120-be6e-48655b5efe88-etcd-service-ca\") pod \"etcd-operator-b45778765-rlmj4\" (UID: \"68d97f24-c17b-4120-be6e-48655b5efe88\") " pod="openshift-etcd-operator/etcd-operator-b45778765-rlmj4" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.818153 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/11d847a0-fe18-4f4a-8d65-b0f64b643c68-config-volume\") pod \"collect-profiles-29426580-zzsjs\" (UID: \"11d847a0-fe18-4f4a-8d65-b0f64b643c68\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426580-zzsjs" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.818432 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/68d97f24-c17b-4120-be6e-48655b5efe88-etcd-ca\") pod \"etcd-operator-b45778765-rlmj4\" (UID: \"68d97f24-c17b-4120-be6e-48655b5efe88\") " pod="openshift-etcd-operator/etcd-operator-b45778765-rlmj4" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.818840 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/68d97f24-c17b-4120-be6e-48655b5efe88-config\") pod \"etcd-operator-b45778765-rlmj4\" (UID: \"68d97f24-c17b-4120-be6e-48655b5efe88\") " pod="openshift-etcd-operator/etcd-operator-b45778765-rlmj4" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.824484 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e55788c5-581c-4013-8c44-83a0b5f74b0a-serving-cert\") pod \"apiserver-76f77b778f-7hxcj\" (UID: \"e55788c5-581c-4013-8c44-83a0b5f74b0a\") " 
pod="openshift-apiserver/apiserver-76f77b778f-7hxcj" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.824948 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q7tpn\" (UniqueName: \"kubernetes.io/projected/472601ba-cee2-4f6e-ac53-a5606ef0469f-kube-api-access-q7tpn\") pod \"image-registry-697d97f7c8-xkp8n\" (UID: \"472601ba-cee2-4f6e-ac53-a5606ef0469f\") " pod="openshift-image-registry/image-registry-697d97f7c8-xkp8n" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.825039 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/df8e88fc-38e8-4748-aed5-7fb71bfdb0df-images\") pod \"machine-config-operator-74547568cd-n4gjw\" (UID: \"df8e88fc-38e8-4748-aed5-7fb71bfdb0df\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-n4gjw" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.825072 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/331fdd7d-1eb1-4a86-ace3-75fe6d2a50e0-metrics-tls\") pod \"ingress-operator-5b745b69d9-vx28k\" (UID: \"331fdd7d-1eb1-4a86-ace3-75fe6d2a50e0\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-vx28k" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.825665 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/68d97f24-c17b-4120-be6e-48655b5efe88-etcd-client\") pod \"etcd-operator-b45778765-rlmj4\" (UID: \"68d97f24-c17b-4120-be6e-48655b5efe88\") " pod="openshift-etcd-operator/etcd-operator-b45778765-rlmj4" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.829540 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a508f977-bce2-4269-ae18-1c655e9befd8-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-ckvrb\" (UID: \"a508f977-bce2-4269-ae18-1c655e9befd8\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-ckvrb" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.830018 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/e55788c5-581c-4013-8c44-83a0b5f74b0a-audit\") pod \"apiserver-76f77b778f-7hxcj\" (UID: \"e55788c5-581c-4013-8c44-83a0b5f74b0a\") " pod="openshift-apiserver/apiserver-76f77b778f-7hxcj" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.830963 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/331fdd7d-1eb1-4a86-ace3-75fe6d2a50e0-bound-sa-token\") pod \"ingress-operator-5b745b69d9-vx28k\" (UID: \"331fdd7d-1eb1-4a86-ace3-75fe6d2a50e0\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-vx28k" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.831049 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bbdcfa81-b48d-4067-af2e-0de54cea8c7e-serving-cert\") pod \"authentication-operator-69f744f599-k6vpl\" (UID: \"bbdcfa81-b48d-4067-af2e-0de54cea8c7e\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-k6vpl" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.831212 5070 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/3dd269a0-64a2-4e74-91e2-7edbf5fd5574-cni-sysctl-allowlist\") pod \"cni-sysctl-allowlist-ds-9chqz\" (UID: \"3dd269a0-64a2-4e74-91e2-7edbf5fd5574\") " pod="openshift-multus/cni-sysctl-allowlist-ds-9chqz" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.831295 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/3dd269a0-64a2-4e74-91e2-7edbf5fd5574-tuning-conf-dir\") pod \"cni-sysctl-allowlist-ds-9chqz\" (UID: \"3dd269a0-64a2-4e74-91e2-7edbf5fd5574\") " pod="openshift-multus/cni-sysctl-allowlist-ds-9chqz" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.831353 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/5dbe31ab-6e43-41c3-8f14-e1ac87938038-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-6td6p\" (UID: \"5dbe31ab-6e43-41c3-8f14-e1ac87938038\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-6td6p" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.831530 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/7eb9db2d-0c78-4df0-ad39-1b3d6bc75d82-apiservice-cert\") pod \"packageserver-d55dfcdfc-wm2b8\" (UID: \"7eb9db2d-0c78-4df0-ad39-1b3d6bc75d82\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-wm2b8" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.831622 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/849cc5eb-fe81-4755-a13e-56cdc7b4f248-config\") pod \"kube-controller-manager-operator-78b949d7b-fhh5m\" (UID: \"849cc5eb-fe81-4755-a13e-56cdc7b4f248\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-fhh5m" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.831779 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/472601ba-cee2-4f6e-ac53-a5606ef0469f-ca-trust-extracted\") pod \"image-registry-697d97f7c8-xkp8n\" (UID: \"472601ba-cee2-4f6e-ac53-a5606ef0469f\") " pod="openshift-image-registry/image-registry-697d97f7c8-xkp8n" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.831903 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m5h54\" (UniqueName: \"kubernetes.io/projected/1ba4ef14-c563-4903-b359-f80b487d8ced-kube-api-access-m5h54\") pod \"route-controller-manager-6576b87f9c-27s4z\" (UID: \"1ba4ef14-c563-4903-b359-f80b487d8ced\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-27s4z" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.832027 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6234fc7c-52ec-4021-b04f-0264df7a6307-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-dlrd7\" (UID: \"6234fc7c-52ec-4021-b04f-0264df7a6307\") " 
pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-dlrd7" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.832084 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r4h4x\" (UniqueName: \"kubernetes.io/projected/6234fc7c-52ec-4021-b04f-0264df7a6307-kube-api-access-r4h4x\") pod \"control-plane-machine-set-operator-78cbb6b69f-dlrd7\" (UID: \"6234fc7c-52ec-4021-b04f-0264df7a6307\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-dlrd7" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.832350 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/849cc5eb-fe81-4755-a13e-56cdc7b4f248-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-fhh5m\" (UID: \"849cc5eb-fe81-4755-a13e-56cdc7b4f248\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-fhh5m" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.832584 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/df8e88fc-38e8-4748-aed5-7fb71bfdb0df-images\") pod \"machine-config-operator-74547568cd-n4gjw\" (UID: \"df8e88fc-38e8-4748-aed5-7fb71bfdb0df\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-n4gjw" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.832840 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/472601ba-cee2-4f6e-ac53-a5606ef0469f-trusted-ca\") pod \"image-registry-697d97f7c8-xkp8n\" (UID: \"472601ba-cee2-4f6e-ac53-a5606ef0469f\") " pod="openshift-image-registry/image-registry-697d97f7c8-xkp8n" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.832904 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fjkrj\" (UniqueName: \"kubernetes.io/projected/5b047179-d09b-486d-8e47-2c110a710e51-kube-api-access-fjkrj\") pod \"openshift-controller-manager-operator-756b6f6bc6-m7blq\" (UID: \"5b047179-d09b-486d-8e47-2c110a710e51\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-m7blq" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.833032 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/11d847a0-fe18-4f4a-8d65-b0f64b643c68-secret-volume\") pod \"collect-profiles-29426580-zzsjs\" (UID: \"11d847a0-fe18-4f4a-8d65-b0f64b643c68\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426580-zzsjs" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.833049 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/d0e149ba-a883-49ad-a21f-40b99873662b-registration-dir\") pod \"csi-hostpathplugin-6fssx\" (UID: \"d0e149ba-a883-49ad-a21f-40b99873662b\") " pod="hostpath-provisioner/csi-hostpathplugin-6fssx" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.833266 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/7eb9db2d-0c78-4df0-ad39-1b3d6bc75d82-webhook-cert\") pod \"packageserver-d55dfcdfc-wm2b8\" (UID: \"7eb9db2d-0c78-4df0-ad39-1b3d6bc75d82\") " 
pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-wm2b8" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.833327 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a508f977-bce2-4269-ae18-1c655e9befd8-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-ckvrb\" (UID: \"a508f977-bce2-4269-ae18-1c655e9befd8\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-ckvrb" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.833542 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/04b427fa-011a-4d5c-8844-48bb0c3f319e-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-grwvc\" (UID: \"04b427fa-011a-4d5c-8844-48bb0c3f319e\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-grwvc" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.833764 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/d0e149ba-a883-49ad-a21f-40b99873662b-socket-dir\") pod \"csi-hostpathplugin-6fssx\" (UID: \"d0e149ba-a883-49ad-a21f-40b99873662b\") " pod="hostpath-provisioner/csi-hostpathplugin-6fssx" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.833867 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/331fdd7d-1eb1-4a86-ace3-75fe6d2a50e0-trusted-ca\") pod \"ingress-operator-5b745b69d9-vx28k\" (UID: \"331fdd7d-1eb1-4a86-ace3-75fe6d2a50e0\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-vx28k" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.834087 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/44e54907-8f0d-4c3c-960c-e1e5bcad7523-config\") pod \"machine-approver-56656f9798-5wtk8\" (UID: \"44e54907-8f0d-4c3c-960c-e1e5bcad7523\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-5wtk8" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.834930 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/472601ba-cee2-4f6e-ac53-a5606ef0469f-ca-trust-extracted\") pod \"image-registry-697d97f7c8-xkp8n\" (UID: \"472601ba-cee2-4f6e-ac53-a5606ef0469f\") " pod="openshift-image-registry/image-registry-697d97f7c8-xkp8n" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.835055 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/df8e88fc-38e8-4748-aed5-7fb71bfdb0df-proxy-tls\") pod \"machine-config-operator-74547568cd-n4gjw\" (UID: \"df8e88fc-38e8-4748-aed5-7fb71bfdb0df\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-n4gjw" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.835171 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/472601ba-cee2-4f6e-ac53-a5606ef0469f-trusted-ca\") pod \"image-registry-697d97f7c8-xkp8n\" (UID: \"472601ba-cee2-4f6e-ac53-a5606ef0469f\") " pod="openshift-image-registry/image-registry-697d97f7c8-xkp8n" Dec 13 03:12:36 crc 
kubenswrapper[5070]: I1213 03:12:36.835674 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/68d97f24-c17b-4120-be6e-48655b5efe88-serving-cert\") pod \"etcd-operator-b45778765-rlmj4\" (UID: \"68d97f24-c17b-4120-be6e-48655b5efe88\") " pod="openshift-etcd-operator/etcd-operator-b45778765-rlmj4" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.836175 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/472601ba-cee2-4f6e-ac53-a5606ef0469f-installation-pull-secrets\") pod \"image-registry-697d97f7c8-xkp8n\" (UID: \"472601ba-cee2-4f6e-ac53-a5606ef0469f\") " pod="openshift-image-registry/image-registry-697d97f7c8-xkp8n" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.837140 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xkp8n\" (UID: \"472601ba-cee2-4f6e-ac53-a5606ef0469f\") " pod="openshift-image-registry/image-registry-697d97f7c8-xkp8n" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.837249 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9lbn2\" (UniqueName: \"kubernetes.io/projected/11d847a0-fe18-4f4a-8d65-b0f64b643c68-kube-api-access-9lbn2\") pod \"collect-profiles-29426580-zzsjs\" (UID: \"11d847a0-fe18-4f4a-8d65-b0f64b643c68\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426580-zzsjs" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.837582 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/7eb9db2d-0c78-4df0-ad39-1b3d6bc75d82-tmpfs\") pod \"packageserver-d55dfcdfc-wm2b8\" (UID: \"7eb9db2d-0c78-4df0-ad39-1b3d6bc75d82\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-wm2b8" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.837626 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/472601ba-cee2-4f6e-ac53-a5606ef0469f-registry-tls\") pod \"image-registry-697d97f7c8-xkp8n\" (UID: \"472601ba-cee2-4f6e-ac53-a5606ef0469f\") " pod="openshift-image-registry/image-registry-697d97f7c8-xkp8n" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.837646 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/472601ba-cee2-4f6e-ac53-a5606ef0469f-bound-sa-token\") pod \"image-registry-697d97f7c8-xkp8n\" (UID: \"472601ba-cee2-4f6e-ac53-a5606ef0469f\") " pod="openshift-image-registry/image-registry-697d97f7c8-xkp8n" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.837667 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/74e8be9f-8aac-4eaf-9d70-b5ffab6f9b54-cert\") pod \"ingress-canary-jgtrr\" (UID: \"74e8be9f-8aac-4eaf-9d70-b5ffab6f9b54\") " pod="openshift-ingress-canary/ingress-canary-jgtrr" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.837687 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: 
\"kubernetes.io/host-path/d0e149ba-a883-49ad-a21f-40b99873662b-csi-data-dir\") pod \"csi-hostpathplugin-6fssx\" (UID: \"d0e149ba-a883-49ad-a21f-40b99873662b\") " pod="hostpath-provisioner/csi-hostpathplugin-6fssx" Dec 13 03:12:36 crc kubenswrapper[5070]: E1213 03:12:36.838917 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 03:12:37.338881905 +0000 UTC m=+49.574725461 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xkp8n" (UID: "472601ba-cee2-4f6e-ac53-a5606ef0469f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.845979 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lwll8\" (UniqueName: \"kubernetes.io/projected/df8e88fc-38e8-4748-aed5-7fb71bfdb0df-kube-api-access-lwll8\") pod \"machine-config-operator-74547568cd-n4gjw\" (UID: \"df8e88fc-38e8-4748-aed5-7fb71bfdb0df\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-n4gjw" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.846224 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/472601ba-cee2-4f6e-ac53-a5606ef0469f-registry-tls\") pod \"image-registry-697d97f7c8-xkp8n\" (UID: \"472601ba-cee2-4f6e-ac53-a5606ef0469f\") " pod="openshift-image-registry/image-registry-697d97f7c8-xkp8n" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.865472 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rjzz6\" (UniqueName: \"kubernetes.io/projected/68d97f24-c17b-4120-be6e-48655b5efe88-kube-api-access-rjzz6\") pod \"etcd-operator-b45778765-rlmj4\" (UID: \"68d97f24-c17b-4120-be6e-48655b5efe88\") " pod="openshift-etcd-operator/etcd-operator-b45778765-rlmj4" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.875947 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q7tpn\" (UniqueName: \"kubernetes.io/projected/472601ba-cee2-4f6e-ac53-a5606ef0469f-kube-api-access-q7tpn\") pod \"image-registry-697d97f7c8-xkp8n\" (UID: \"472601ba-cee2-4f6e-ac53-a5606ef0469f\") " pod="openshift-image-registry/image-registry-697d97f7c8-xkp8n" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.892103 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-rlmj4" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.923464 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/472601ba-cee2-4f6e-ac53-a5606ef0469f-bound-sa-token\") pod \"image-registry-697d97f7c8-xkp8n\" (UID: \"472601ba-cee2-4f6e-ac53-a5606ef0469f\") " pod="openshift-image-registry/image-registry-697d97f7c8-xkp8n" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.938432 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 03:12:36 crc kubenswrapper[5070]: E1213 03:12:36.938657 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 03:12:37.438626867 +0000 UTC m=+49.674470423 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.939208 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5xfr5\" (UniqueName: \"kubernetes.io/projected/3dd269a0-64a2-4e74-91e2-7edbf5fd5574-kube-api-access-5xfr5\") pod \"cni-sysctl-allowlist-ds-9chqz\" (UID: \"3dd269a0-64a2-4e74-91e2-7edbf5fd5574\") " pod="openshift-multus/cni-sysctl-allowlist-ds-9chqz" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.939254 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1ba4ef14-c563-4903-b359-f80b487d8ced-config\") pod \"route-controller-manager-6576b87f9c-27s4z\" (UID: \"1ba4ef14-c563-4903-b359-f80b487d8ced\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-27s4z" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.939299 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-htt95\" (UniqueName: \"kubernetes.io/projected/b4996b72-90e0-46a0-a0e9-fed852729a89-kube-api-access-htt95\") pod \"service-ca-9c57cc56f-l9t5h\" (UID: \"b4996b72-90e0-46a0-a0e9-fed852729a89\") " pod="openshift-service-ca/service-ca-9c57cc56f-l9t5h" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.939321 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d84fad97-769a-4f5d-8e19-d91d308675f6-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-vblq5\" (UID: \"d84fad97-769a-4f5d-8e19-d91d308675f6\") " pod="openshift-marketplace/marketplace-operator-79b997595-vblq5" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.939344 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/bbdcfa81-b48d-4067-af2e-0de54cea8c7e-service-ca-bundle\") pod \"authentication-operator-69f744f599-k6vpl\" (UID: \"bbdcfa81-b48d-4067-af2e-0de54cea8c7e\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-k6vpl" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.939384 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e55788c5-581c-4013-8c44-83a0b5f74b0a-config\") pod \"apiserver-76f77b778f-7hxcj\" (UID: \"e55788c5-581c-4013-8c44-83a0b5f74b0a\") " pod="openshift-apiserver/apiserver-76f77b778f-7hxcj" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.939406 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-glgjt\" (UniqueName: \"kubernetes.io/projected/bbdcfa81-b48d-4067-af2e-0de54cea8c7e-kube-api-access-glgjt\") pod \"authentication-operator-69f744f599-k6vpl\" (UID: \"bbdcfa81-b48d-4067-af2e-0de54cea8c7e\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-k6vpl" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.939424 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/994638f3-a23b-445a-b2d9-929361c0a5e3-metrics-tls\") pod \"dns-operator-744455d44c-2sbvn\" (UID: \"994638f3-a23b-445a-b2d9-929361c0a5e3\") " pod="openshift-dns-operator/dns-operator-744455d44c-2sbvn" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.939488 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/e55788c5-581c-4013-8c44-83a0b5f74b0a-etcd-serving-ca\") pod \"apiserver-76f77b778f-7hxcj\" (UID: \"e55788c5-581c-4013-8c44-83a0b5f74b0a\") " pod="openshift-apiserver/apiserver-76f77b778f-7hxcj" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.939513 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/8cfebcb5-7935-4f04-8d87-40dce0dc5ef7-node-bootstrap-token\") pod \"machine-config-server-hrb5f\" (UID: \"8cfebcb5-7935-4f04-8d87-40dce0dc5ef7\") " pod="openshift-machine-config-operator/machine-config-server-hrb5f" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.939560 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/e55788c5-581c-4013-8c44-83a0b5f74b0a-etcd-client\") pod \"apiserver-76f77b778f-7hxcj\" (UID: \"e55788c5-581c-4013-8c44-83a0b5f74b0a\") " pod="openshift-apiserver/apiserver-76f77b778f-7hxcj" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.939586 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8t4d7\" (UniqueName: \"kubernetes.io/projected/d0e149ba-a883-49ad-a21f-40b99873662b-kube-api-access-8t4d7\") pod \"csi-hostpathplugin-6fssx\" (UID: \"d0e149ba-a883-49ad-a21f-40b99873662b\") " pod="hostpath-provisioner/csi-hostpathplugin-6fssx" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.939633 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-68bpt\" (UniqueName: \"kubernetes.io/projected/088b1a9f-bf10-4751-875e-092b9c149cfa-kube-api-access-68bpt\") pod \"multus-admission-controller-857f4d67dd-h8tb8\" (UID: \"088b1a9f-bf10-4751-875e-092b9c149cfa\") " 
pod="openshift-multus/multus-admission-controller-857f4d67dd-h8tb8" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.939661 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k79b6\" (UniqueName: \"kubernetes.io/projected/d84fad97-769a-4f5d-8e19-d91d308675f6-kube-api-access-k79b6\") pod \"marketplace-operator-79b997595-vblq5\" (UID: \"d84fad97-769a-4f5d-8e19-d91d308675f6\") " pod="openshift-marketplace/marketplace-operator-79b997595-vblq5" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.939712 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6m6ct\" (UniqueName: \"kubernetes.io/projected/e55788c5-581c-4013-8c44-83a0b5f74b0a-kube-api-access-6m6ct\") pod \"apiserver-76f77b778f-7hxcj\" (UID: \"e55788c5-581c-4013-8c44-83a0b5f74b0a\") " pod="openshift-apiserver/apiserver-76f77b778f-7hxcj" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.939736 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/44e54907-8f0d-4c3c-960c-e1e5bcad7523-machine-approver-tls\") pod \"machine-approver-56656f9798-5wtk8\" (UID: \"44e54907-8f0d-4c3c-960c-e1e5bcad7523\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-5wtk8" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.939752 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/e55788c5-581c-4013-8c44-83a0b5f74b0a-audit-dir\") pod \"apiserver-76f77b778f-7hxcj\" (UID: \"e55788c5-581c-4013-8c44-83a0b5f74b0a\") " pod="openshift-apiserver/apiserver-76f77b778f-7hxcj" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.939810 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-62gjl\" (UniqueName: \"kubernetes.io/projected/7eb9db2d-0c78-4df0-ad39-1b3d6bc75d82-kube-api-access-62gjl\") pod \"packageserver-d55dfcdfc-wm2b8\" (UID: \"7eb9db2d-0c78-4df0-ad39-1b3d6bc75d82\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-wm2b8" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.939843 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ltgfq\" (UniqueName: \"kubernetes.io/projected/5dbe31ab-6e43-41c3-8f14-e1ac87938038-kube-api-access-ltgfq\") pod \"machine-config-controller-84d6567774-6td6p\" (UID: \"5dbe31ab-6e43-41c3-8f14-e1ac87938038\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-6td6p" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.939896 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e55788c5-581c-4013-8c44-83a0b5f74b0a-serving-cert\") pod \"apiserver-76f77b778f-7hxcj\" (UID: \"e55788c5-581c-4013-8c44-83a0b5f74b0a\") " pod="openshift-apiserver/apiserver-76f77b778f-7hxcj" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.939964 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/331fdd7d-1eb1-4a86-ace3-75fe6d2a50e0-metrics-tls\") pod \"ingress-operator-5b745b69d9-vx28k\" (UID: \"331fdd7d-1eb1-4a86-ace3-75fe6d2a50e0\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-vx28k" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.939998 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a508f977-bce2-4269-ae18-1c655e9befd8-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-ckvrb\" (UID: \"a508f977-bce2-4269-ae18-1c655e9befd8\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-ckvrb" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.940048 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bbdcfa81-b48d-4067-af2e-0de54cea8c7e-serving-cert\") pod \"authentication-operator-69f744f599-k6vpl\" (UID: \"bbdcfa81-b48d-4067-af2e-0de54cea8c7e\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-k6vpl" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.940070 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/e55788c5-581c-4013-8c44-83a0b5f74b0a-audit\") pod \"apiserver-76f77b778f-7hxcj\" (UID: \"e55788c5-581c-4013-8c44-83a0b5f74b0a\") " pod="openshift-apiserver/apiserver-76f77b778f-7hxcj" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.940126 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/331fdd7d-1eb1-4a86-ace3-75fe6d2a50e0-bound-sa-token\") pod \"ingress-operator-5b745b69d9-vx28k\" (UID: \"331fdd7d-1eb1-4a86-ace3-75fe6d2a50e0\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-vx28k" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.940161 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/3dd269a0-64a2-4e74-91e2-7edbf5fd5574-cni-sysctl-allowlist\") pod \"cni-sysctl-allowlist-ds-9chqz\" (UID: \"3dd269a0-64a2-4e74-91e2-7edbf5fd5574\") " pod="openshift-multus/cni-sysctl-allowlist-ds-9chqz" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.940210 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/3dd269a0-64a2-4e74-91e2-7edbf5fd5574-tuning-conf-dir\") pod \"cni-sysctl-allowlist-ds-9chqz\" (UID: \"3dd269a0-64a2-4e74-91e2-7edbf5fd5574\") " pod="openshift-multus/cni-sysctl-allowlist-ds-9chqz" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.940236 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/5dbe31ab-6e43-41c3-8f14-e1ac87938038-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-6td6p\" (UID: \"5dbe31ab-6e43-41c3-8f14-e1ac87938038\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-6td6p" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.940259 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/7eb9db2d-0c78-4df0-ad39-1b3d6bc75d82-apiservice-cert\") pod \"packageserver-d55dfcdfc-wm2b8\" (UID: \"7eb9db2d-0c78-4df0-ad39-1b3d6bc75d82\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-wm2b8" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.940305 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/849cc5eb-fe81-4755-a13e-56cdc7b4f248-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-fhh5m\" (UID: 
\"849cc5eb-fe81-4755-a13e-56cdc7b4f248\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-fhh5m" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.940330 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/849cc5eb-fe81-4755-a13e-56cdc7b4f248-config\") pod \"kube-controller-manager-operator-78b949d7b-fhh5m\" (UID: \"849cc5eb-fe81-4755-a13e-56cdc7b4f248\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-fhh5m" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.940379 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m5h54\" (UniqueName: \"kubernetes.io/projected/1ba4ef14-c563-4903-b359-f80b487d8ced-kube-api-access-m5h54\") pod \"route-controller-manager-6576b87f9c-27s4z\" (UID: \"1ba4ef14-c563-4903-b359-f80b487d8ced\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-27s4z" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.940406 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6234fc7c-52ec-4021-b04f-0264df7a6307-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-dlrd7\" (UID: \"6234fc7c-52ec-4021-b04f-0264df7a6307\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-dlrd7" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.940402 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9lbn2\" (UniqueName: \"kubernetes.io/projected/11d847a0-fe18-4f4a-8d65-b0f64b643c68-kube-api-access-9lbn2\") pod \"collect-profiles-29426580-zzsjs\" (UID: \"11d847a0-fe18-4f4a-8d65-b0f64b643c68\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426580-zzsjs" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.940473 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r4h4x\" (UniqueName: \"kubernetes.io/projected/6234fc7c-52ec-4021-b04f-0264df7a6307-kube-api-access-r4h4x\") pod \"control-plane-machine-set-operator-78cbb6b69f-dlrd7\" (UID: \"6234fc7c-52ec-4021-b04f-0264df7a6307\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-dlrd7" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.940503 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fjkrj\" (UniqueName: \"kubernetes.io/projected/5b047179-d09b-486d-8e47-2c110a710e51-kube-api-access-fjkrj\") pod \"openshift-controller-manager-operator-756b6f6bc6-m7blq\" (UID: \"5b047179-d09b-486d-8e47-2c110a710e51\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-m7blq" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.940552 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/d0e149ba-a883-49ad-a21f-40b99873662b-registration-dir\") pod \"csi-hostpathplugin-6fssx\" (UID: \"d0e149ba-a883-49ad-a21f-40b99873662b\") " pod="hostpath-provisioner/csi-hostpathplugin-6fssx" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.940576 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/7eb9db2d-0c78-4df0-ad39-1b3d6bc75d82-webhook-cert\") 
pod \"packageserver-d55dfcdfc-wm2b8\" (UID: \"7eb9db2d-0c78-4df0-ad39-1b3d6bc75d82\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-wm2b8" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.940620 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a508f977-bce2-4269-ae18-1c655e9befd8-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-ckvrb\" (UID: \"a508f977-bce2-4269-ae18-1c655e9befd8\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-ckvrb" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.940646 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/04b427fa-011a-4d5c-8844-48bb0c3f319e-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-grwvc\" (UID: \"04b427fa-011a-4d5c-8844-48bb0c3f319e\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-grwvc" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.940668 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/d0e149ba-a883-49ad-a21f-40b99873662b-socket-dir\") pod \"csi-hostpathplugin-6fssx\" (UID: \"d0e149ba-a883-49ad-a21f-40b99873662b\") " pod="hostpath-provisioner/csi-hostpathplugin-6fssx" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.940785 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/331fdd7d-1eb1-4a86-ace3-75fe6d2a50e0-trusted-ca\") pod \"ingress-operator-5b745b69d9-vx28k\" (UID: \"331fdd7d-1eb1-4a86-ace3-75fe6d2a50e0\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-vx28k" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.940816 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/44e54907-8f0d-4c3c-960c-e1e5bcad7523-config\") pod \"machine-approver-56656f9798-5wtk8\" (UID: \"44e54907-8f0d-4c3c-960c-e1e5bcad7523\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-5wtk8" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.940867 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xkp8n\" (UID: \"472601ba-cee2-4f6e-ac53-a5606ef0469f\") " pod="openshift-image-registry/image-registry-697d97f7c8-xkp8n" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.940894 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/7eb9db2d-0c78-4df0-ad39-1b3d6bc75d82-tmpfs\") pod \"packageserver-d55dfcdfc-wm2b8\" (UID: \"7eb9db2d-0c78-4df0-ad39-1b3d6bc75d82\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-wm2b8" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.940938 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/74e8be9f-8aac-4eaf-9d70-b5ffab6f9b54-cert\") pod \"ingress-canary-jgtrr\" (UID: \"74e8be9f-8aac-4eaf-9d70-b5ffab6f9b54\") " pod="openshift-ingress-canary/ingress-canary-jgtrr" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 
03:12:36.940959 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/d0e149ba-a883-49ad-a21f-40b99873662b-csi-data-dir\") pod \"csi-hostpathplugin-6fssx\" (UID: \"d0e149ba-a883-49ad-a21f-40b99873662b\") " pod="hostpath-provisioner/csi-hostpathplugin-6fssx" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.941253 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1ba4ef14-c563-4903-b359-f80b487d8ced-config\") pod \"route-controller-manager-6576b87f9c-27s4z\" (UID: \"1ba4ef14-c563-4903-b359-f80b487d8ced\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-27s4z" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.941406 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d84fad97-769a-4f5d-8e19-d91d308675f6-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-vblq5\" (UID: \"d84fad97-769a-4f5d-8e19-d91d308675f6\") " pod="openshift-marketplace/marketplace-operator-79b997595-vblq5" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.941582 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/088b1a9f-bf10-4751-875e-092b9c149cfa-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-h8tb8\" (UID: \"088b1a9f-bf10-4751-875e-092b9c149cfa\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-h8tb8" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.941625 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/79783d23-8a02-4af8-bc4b-8e8f74dae08e-profile-collector-cert\") pod \"catalog-operator-68c6474976-kpb6p\" (UID: \"79783d23-8a02-4af8-bc4b-8e8f74dae08e\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-kpb6p" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.941672 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/b4996b72-90e0-46a0-a0e9-fed852729a89-signing-key\") pod \"service-ca-9c57cc56f-l9t5h\" (UID: \"b4996b72-90e0-46a0-a0e9-fed852729a89\") " pod="openshift-service-ca/service-ca-9c57cc56f-l9t5h" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.941703 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l2ms9\" (UniqueName: \"kubernetes.io/projected/331fdd7d-1eb1-4a86-ace3-75fe6d2a50e0-kube-api-access-l2ms9\") pod \"ingress-operator-5b745b69d9-vx28k\" (UID: \"331fdd7d-1eb1-4a86-ace3-75fe6d2a50e0\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-vx28k" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.941765 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pwnzg\" (UniqueName: \"kubernetes.io/projected/79783d23-8a02-4af8-bc4b-8e8f74dae08e-kube-api-access-pwnzg\") pod \"catalog-operator-68c6474976-kpb6p\" (UID: \"79783d23-8a02-4af8-bc4b-8e8f74dae08e\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-kpb6p" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.941793 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/5dbe31ab-6e43-41c3-8f14-e1ac87938038-proxy-tls\") pod 
\"machine-config-controller-84d6567774-6td6p\" (UID: \"5dbe31ab-6e43-41c3-8f14-e1ac87938038\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-6td6p" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.941849 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/d0e149ba-a883-49ad-a21f-40b99873662b-mountpoint-dir\") pod \"csi-hostpathplugin-6fssx\" (UID: \"d0e149ba-a883-49ad-a21f-40b99873662b\") " pod="hostpath-provisioner/csi-hostpathplugin-6fssx" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.941880 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/e55788c5-581c-4013-8c44-83a0b5f74b0a-encryption-config\") pod \"apiserver-76f77b778f-7hxcj\" (UID: \"e55788c5-581c-4013-8c44-83a0b5f74b0a\") " pod="openshift-apiserver/apiserver-76f77b778f-7hxcj" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.941939 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wbjzr\" (UniqueName: \"kubernetes.io/projected/44e54907-8f0d-4c3c-960c-e1e5bcad7523-kube-api-access-wbjzr\") pod \"machine-approver-56656f9798-5wtk8\" (UID: \"44e54907-8f0d-4c3c-960c-e1e5bcad7523\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-5wtk8" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.941966 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/849cc5eb-fe81-4755-a13e-56cdc7b4f248-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-fhh5m\" (UID: \"849cc5eb-fe81-4755-a13e-56cdc7b4f248\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-fhh5m" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.942016 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5b047179-d09b-486d-8e47-2c110a710e51-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-m7blq\" (UID: \"5b047179-d09b-486d-8e47-2c110a710e51\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-m7blq" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.942105 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/d0e149ba-a883-49ad-a21f-40b99873662b-plugins-dir\") pod \"csi-hostpathplugin-6fssx\" (UID: \"d0e149ba-a883-49ad-a21f-40b99873662b\") " pod="hostpath-provisioner/csi-hostpathplugin-6fssx" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.942132 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mx678\" (UniqueName: \"kubernetes.io/projected/8cfebcb5-7935-4f04-8d87-40dce0dc5ef7-kube-api-access-mx678\") pod \"machine-config-server-hrb5f\" (UID: \"8cfebcb5-7935-4f04-8d87-40dce0dc5ef7\") " pod="openshift-machine-config-operator/machine-config-server-hrb5f" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.942155 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e55788c5-581c-4013-8c44-83a0b5f74b0a-trusted-ca-bundle\") pod \"apiserver-76f77b778f-7hxcj\" (UID: \"e55788c5-581c-4013-8c44-83a0b5f74b0a\") " pod="openshift-apiserver/apiserver-76f77b778f-7hxcj" Dec 13 
03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.942203 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4q7lj\" (UniqueName: \"kubernetes.io/projected/994638f3-a23b-445a-b2d9-929361c0a5e3-kube-api-access-4q7lj\") pod \"dns-operator-744455d44c-2sbvn\" (UID: \"994638f3-a23b-445a-b2d9-929361c0a5e3\") " pod="openshift-dns-operator/dns-operator-744455d44c-2sbvn" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.942232 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/44e54907-8f0d-4c3c-960c-e1e5bcad7523-auth-proxy-config\") pod \"machine-approver-56656f9798-5wtk8\" (UID: \"44e54907-8f0d-4c3c-960c-e1e5bcad7523\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-5wtk8" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.942279 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a508f977-bce2-4269-ae18-1c655e9befd8-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-ckvrb\" (UID: \"a508f977-bce2-4269-ae18-1c655e9befd8\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-ckvrb" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.942308 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a54536a5-e3a5-40e5-8864-f799bdfff6ca-metrics-tls\") pod \"dns-default-n85jm\" (UID: \"a54536a5-e3a5-40e5-8864-f799bdfff6ca\") " pod="openshift-dns/dns-default-n85jm" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.942361 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/e55788c5-581c-4013-8c44-83a0b5f74b0a-image-import-ca\") pod \"apiserver-76f77b778f-7hxcj\" (UID: \"e55788c5-581c-4013-8c44-83a0b5f74b0a\") " pod="openshift-apiserver/apiserver-76f77b778f-7hxcj" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.942384 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/e55788c5-581c-4013-8c44-83a0b5f74b0a-audit-dir\") pod \"apiserver-76f77b778f-7hxcj\" (UID: \"e55788c5-581c-4013-8c44-83a0b5f74b0a\") " pod="openshift-apiserver/apiserver-76f77b778f-7hxcj" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.942390 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5b047179-d09b-486d-8e47-2c110a710e51-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-m7blq\" (UID: \"5b047179-d09b-486d-8e47-2c110a710e51\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-m7blq" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.942457 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bjdmv\" (UniqueName: \"kubernetes.io/projected/04b427fa-011a-4d5c-8844-48bb0c3f319e-kube-api-access-bjdmv\") pod \"package-server-manager-789f6589d5-grwvc\" (UID: \"04b427fa-011a-4d5c-8844-48bb0c3f319e\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-grwvc" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.942481 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/1ba4ef14-c563-4903-b359-f80b487d8ced-serving-cert\") pod \"route-controller-manager-6576b87f9c-27s4z\" (UID: \"1ba4ef14-c563-4903-b359-f80b487d8ced\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-27s4z" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.942510 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x4vkp\" (UniqueName: \"kubernetes.io/projected/74e8be9f-8aac-4eaf-9d70-b5ffab6f9b54-kube-api-access-x4vkp\") pod \"ingress-canary-jgtrr\" (UID: \"74e8be9f-8aac-4eaf-9d70-b5ffab6f9b54\") " pod="openshift-ingress-canary/ingress-canary-jgtrr" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.942531 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1ba4ef14-c563-4903-b359-f80b487d8ced-client-ca\") pod \"route-controller-manager-6576b87f9c-27s4z\" (UID: \"1ba4ef14-c563-4903-b359-f80b487d8ced\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-27s4z" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.942566 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/b4996b72-90e0-46a0-a0e9-fed852729a89-signing-cabundle\") pod \"service-ca-9c57cc56f-l9t5h\" (UID: \"b4996b72-90e0-46a0-a0e9-fed852729a89\") " pod="openshift-service-ca/service-ca-9c57cc56f-l9t5h" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.942607 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zgzzs\" (UniqueName: \"kubernetes.io/projected/a54536a5-e3a5-40e5-8864-f799bdfff6ca-kube-api-access-zgzzs\") pod \"dns-default-n85jm\" (UID: \"a54536a5-e3a5-40e5-8864-f799bdfff6ca\") " pod="openshift-dns/dns-default-n85jm" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.942642 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/e55788c5-581c-4013-8c44-83a0b5f74b0a-node-pullsecrets\") pod \"apiserver-76f77b778f-7hxcj\" (UID: \"e55788c5-581c-4013-8c44-83a0b5f74b0a\") " pod="openshift-apiserver/apiserver-76f77b778f-7hxcj" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.942666 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/bbdcfa81-b48d-4067-af2e-0de54cea8c7e-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-k6vpl\" (UID: \"bbdcfa81-b48d-4067-af2e-0de54cea8c7e\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-k6vpl" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.942695 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/8cfebcb5-7935-4f04-8d87-40dce0dc5ef7-certs\") pod \"machine-config-server-hrb5f\" (UID: \"8cfebcb5-7935-4f04-8d87-40dce0dc5ef7\") " pod="openshift-machine-config-operator/machine-config-server-hrb5f" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.942729 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/849cc5eb-fe81-4755-a13e-56cdc7b4f248-config\") pod \"kube-controller-manager-operator-78b949d7b-fhh5m\" (UID: \"849cc5eb-fe81-4755-a13e-56cdc7b4f248\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-fhh5m" Dec 13 
03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.942734 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/d84fad97-769a-4f5d-8e19-d91d308675f6-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-vblq5\" (UID: \"d84fad97-769a-4f5d-8e19-d91d308675f6\") " pod="openshift-marketplace/marketplace-operator-79b997595-vblq5" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.942777 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ready\" (UniqueName: \"kubernetes.io/empty-dir/3dd269a0-64a2-4e74-91e2-7edbf5fd5574-ready\") pod \"cni-sysctl-allowlist-ds-9chqz\" (UID: \"3dd269a0-64a2-4e74-91e2-7edbf5fd5574\") " pod="openshift-multus/cni-sysctl-allowlist-ds-9chqz" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.942800 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a54536a5-e3a5-40e5-8864-f799bdfff6ca-config-volume\") pod \"dns-default-n85jm\" (UID: \"a54536a5-e3a5-40e5-8864-f799bdfff6ca\") " pod="openshift-dns/dns-default-n85jm" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.942818 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/79783d23-8a02-4af8-bc4b-8e8f74dae08e-srv-cert\") pod \"catalog-operator-68c6474976-kpb6p\" (UID: \"79783d23-8a02-4af8-bc4b-8e8f74dae08e\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-kpb6p" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.942834 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bbdcfa81-b48d-4067-af2e-0de54cea8c7e-config\") pod \"authentication-operator-69f744f599-k6vpl\" (UID: \"bbdcfa81-b48d-4067-af2e-0de54cea8c7e\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-k6vpl" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.943214 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/44e54907-8f0d-4c3c-960c-e1e5bcad7523-config\") pod \"machine-approver-56656f9798-5wtk8\" (UID: \"44e54907-8f0d-4c3c-960c-e1e5bcad7523\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-5wtk8" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.944000 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5b047179-d09b-486d-8e47-2c110a710e51-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-m7blq\" (UID: \"5b047179-d09b-486d-8e47-2c110a710e51\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-m7blq" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.944391 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bbdcfa81-b48d-4067-af2e-0de54cea8c7e-config\") pod \"authentication-operator-69f744f599-k6vpl\" (UID: \"bbdcfa81-b48d-4067-af2e-0de54cea8c7e\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-k6vpl" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.945049 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/bbdcfa81-b48d-4067-af2e-0de54cea8c7e-service-ca-bundle\") pod 
\"authentication-operator-69f744f599-k6vpl\" (UID: \"bbdcfa81-b48d-4067-af2e-0de54cea8c7e\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-k6vpl" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.945364 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e55788c5-581c-4013-8c44-83a0b5f74b0a-config\") pod \"apiserver-76f77b778f-7hxcj\" (UID: \"e55788c5-581c-4013-8c44-83a0b5f74b0a\") " pod="openshift-apiserver/apiserver-76f77b778f-7hxcj" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.945543 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/e55788c5-581c-4013-8c44-83a0b5f74b0a-node-pullsecrets\") pod \"apiserver-76f77b778f-7hxcj\" (UID: \"e55788c5-581c-4013-8c44-83a0b5f74b0a\") " pod="openshift-apiserver/apiserver-76f77b778f-7hxcj" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.945581 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/d0e149ba-a883-49ad-a21f-40b99873662b-socket-dir\") pod \"csi-hostpathplugin-6fssx\" (UID: \"d0e149ba-a883-49ad-a21f-40b99873662b\") " pod="hostpath-provisioner/csi-hostpathplugin-6fssx" Dec 13 03:12:36 crc kubenswrapper[5070]: E1213 03:12:36.945985 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 03:12:37.445967731 +0000 UTC m=+49.681811467 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xkp8n" (UID: "472601ba-cee2-4f6e-ac53-a5606ef0469f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.946032 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/3dd269a0-64a2-4e74-91e2-7edbf5fd5574-cni-sysctl-allowlist\") pod \"cni-sysctl-allowlist-ds-9chqz\" (UID: \"3dd269a0-64a2-4e74-91e2-7edbf5fd5574\") " pod="openshift-multus/cni-sysctl-allowlist-ds-9chqz" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.947378 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/3dd269a0-64a2-4e74-91e2-7edbf5fd5574-tuning-conf-dir\") pod \"cni-sysctl-allowlist-ds-9chqz\" (UID: \"3dd269a0-64a2-4e74-91e2-7edbf5fd5574\") " pod="openshift-multus/cni-sysctl-allowlist-ds-9chqz" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.947465 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/e55788c5-581c-4013-8c44-83a0b5f74b0a-audit\") pod \"apiserver-76f77b778f-7hxcj\" (UID: \"e55788c5-581c-4013-8c44-83a0b5f74b0a\") " pod="openshift-apiserver/apiserver-76f77b778f-7hxcj" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.947787 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/d0e149ba-a883-49ad-a21f-40b99873662b-mountpoint-dir\") pod 
\"csi-hostpathplugin-6fssx\" (UID: \"d0e149ba-a883-49ad-a21f-40b99873662b\") " pod="hostpath-provisioner/csi-hostpathplugin-6fssx" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.947799 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ready\" (UniqueName: \"kubernetes.io/empty-dir/3dd269a0-64a2-4e74-91e2-7edbf5fd5574-ready\") pod \"cni-sysctl-allowlist-ds-9chqz\" (UID: \"3dd269a0-64a2-4e74-91e2-7edbf5fd5574\") " pod="openshift-multus/cni-sysctl-allowlist-ds-9chqz" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.948386 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a54536a5-e3a5-40e5-8864-f799bdfff6ca-config-volume\") pod \"dns-default-n85jm\" (UID: \"a54536a5-e3a5-40e5-8864-f799bdfff6ca\") " pod="openshift-dns/dns-default-n85jm" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.948712 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/bbdcfa81-b48d-4067-af2e-0de54cea8c7e-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-k6vpl\" (UID: \"bbdcfa81-b48d-4067-af2e-0de54cea8c7e\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-k6vpl" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.948831 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/7eb9db2d-0c78-4df0-ad39-1b3d6bc75d82-tmpfs\") pod \"packageserver-d55dfcdfc-wm2b8\" (UID: \"7eb9db2d-0c78-4df0-ad39-1b3d6bc75d82\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-wm2b8" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.948878 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/b4996b72-90e0-46a0-a0e9-fed852729a89-signing-cabundle\") pod \"service-ca-9c57cc56f-l9t5h\" (UID: \"b4996b72-90e0-46a0-a0e9-fed852729a89\") " pod="openshift-service-ca/service-ca-9c57cc56f-l9t5h" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.949730 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/d0e149ba-a883-49ad-a21f-40b99873662b-csi-data-dir\") pod \"csi-hostpathplugin-6fssx\" (UID: \"d0e149ba-a883-49ad-a21f-40b99873662b\") " pod="hostpath-provisioner/csi-hostpathplugin-6fssx" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.951637 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/d0e149ba-a883-49ad-a21f-40b99873662b-plugins-dir\") pod \"csi-hostpathplugin-6fssx\" (UID: \"d0e149ba-a883-49ad-a21f-40b99873662b\") " pod="hostpath-provisioner/csi-hostpathplugin-6fssx" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.951904 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/d0e149ba-a883-49ad-a21f-40b99873662b-registration-dir\") pod \"csi-hostpathplugin-6fssx\" (UID: \"d0e149ba-a883-49ad-a21f-40b99873662b\") " pod="hostpath-provisioner/csi-hostpathplugin-6fssx" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.952691 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a508f977-bce2-4269-ae18-1c655e9befd8-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-ckvrb\" (UID: 
\"a508f977-bce2-4269-ae18-1c655e9befd8\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-ckvrb" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.952761 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/5dbe31ab-6e43-41c3-8f14-e1ac87938038-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-6td6p\" (UID: \"5dbe31ab-6e43-41c3-8f14-e1ac87938038\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-6td6p" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.953324 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/331fdd7d-1eb1-4a86-ace3-75fe6d2a50e0-trusted-ca\") pod \"ingress-operator-5b745b69d9-vx28k\" (UID: \"331fdd7d-1eb1-4a86-ace3-75fe6d2a50e0\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-vx28k" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.953478 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/44e54907-8f0d-4c3c-960c-e1e5bcad7523-auth-proxy-config\") pod \"machine-approver-56656f9798-5wtk8\" (UID: \"44e54907-8f0d-4c3c-960c-e1e5bcad7523\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-5wtk8" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.954084 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/e55788c5-581c-4013-8c44-83a0b5f74b0a-etcd-serving-ca\") pod \"apiserver-76f77b778f-7hxcj\" (UID: \"e55788c5-581c-4013-8c44-83a0b5f74b0a\") " pod="openshift-apiserver/apiserver-76f77b778f-7hxcj" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.954242 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1ba4ef14-c563-4903-b359-f80b487d8ced-client-ca\") pod \"route-controller-manager-6576b87f9c-27s4z\" (UID: \"1ba4ef14-c563-4903-b359-f80b487d8ced\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-27s4z" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.955336 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/8cfebcb5-7935-4f04-8d87-40dce0dc5ef7-node-bootstrap-token\") pod \"machine-config-server-hrb5f\" (UID: \"8cfebcb5-7935-4f04-8d87-40dce0dc5ef7\") " pod="openshift-machine-config-operator/machine-config-server-hrb5f" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.955451 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/8cfebcb5-7935-4f04-8d87-40dce0dc5ef7-certs\") pod \"machine-config-server-hrb5f\" (UID: \"8cfebcb5-7935-4f04-8d87-40dce0dc5ef7\") " pod="openshift-machine-config-operator/machine-config-server-hrb5f" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.957316 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5b047179-d09b-486d-8e47-2c110a710e51-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-m7blq\" (UID: \"5b047179-d09b-486d-8e47-2c110a710e51\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-m7blq" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.957696 5070 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bbdcfa81-b48d-4067-af2e-0de54cea8c7e-serving-cert\") pod \"authentication-operator-69f744f599-k6vpl\" (UID: \"bbdcfa81-b48d-4067-af2e-0de54cea8c7e\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-k6vpl" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.959107 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/e55788c5-581c-4013-8c44-83a0b5f74b0a-etcd-client\") pod \"apiserver-76f77b778f-7hxcj\" (UID: \"e55788c5-581c-4013-8c44-83a0b5f74b0a\") " pod="openshift-apiserver/apiserver-76f77b778f-7hxcj" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.959208 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/994638f3-a23b-445a-b2d9-929361c0a5e3-metrics-tls\") pod \"dns-operator-744455d44c-2sbvn\" (UID: \"994638f3-a23b-445a-b2d9-929361c0a5e3\") " pod="openshift-dns-operator/dns-operator-744455d44c-2sbvn" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.959589 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/849cc5eb-fe81-4755-a13e-56cdc7b4f248-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-fhh5m\" (UID: \"849cc5eb-fe81-4755-a13e-56cdc7b4f248\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-fhh5m" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.960107 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/44e54907-8f0d-4c3c-960c-e1e5bcad7523-machine-approver-tls\") pod \"machine-approver-56656f9798-5wtk8\" (UID: \"44e54907-8f0d-4c3c-960c-e1e5bcad7523\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-5wtk8" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.960486 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/e55788c5-581c-4013-8c44-83a0b5f74b0a-image-import-ca\") pod \"apiserver-76f77b778f-7hxcj\" (UID: \"e55788c5-581c-4013-8c44-83a0b5f74b0a\") " pod="openshift-apiserver/apiserver-76f77b778f-7hxcj" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.960607 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/e55788c5-581c-4013-8c44-83a0b5f74b0a-encryption-config\") pod \"apiserver-76f77b778f-7hxcj\" (UID: \"e55788c5-581c-4013-8c44-83a0b5f74b0a\") " pod="openshift-apiserver/apiserver-76f77b778f-7hxcj" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.961255 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e55788c5-581c-4013-8c44-83a0b5f74b0a-trusted-ca-bundle\") pod \"apiserver-76f77b778f-7hxcj\" (UID: \"e55788c5-581c-4013-8c44-83a0b5f74b0a\") " pod="openshift-apiserver/apiserver-76f77b778f-7hxcj" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.961335 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/5dbe31ab-6e43-41c3-8f14-e1ac87938038-proxy-tls\") pod \"machine-config-controller-84d6567774-6td6p\" (UID: \"5dbe31ab-6e43-41c3-8f14-e1ac87938038\") " 
pod="openshift-machine-config-operator/machine-config-controller-84d6567774-6td6p" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.961761 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/d84fad97-769a-4f5d-8e19-d91d308675f6-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-vblq5\" (UID: \"d84fad97-769a-4f5d-8e19-d91d308675f6\") " pod="openshift-marketplace/marketplace-operator-79b997595-vblq5" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.965392 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a508f977-bce2-4269-ae18-1c655e9befd8-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-ckvrb\" (UID: \"a508f977-bce2-4269-ae18-1c655e9befd8\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-ckvrb" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.966185 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/7eb9db2d-0c78-4df0-ad39-1b3d6bc75d82-apiservice-cert\") pod \"packageserver-d55dfcdfc-wm2b8\" (UID: \"7eb9db2d-0c78-4df0-ad39-1b3d6bc75d82\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-wm2b8" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.968248 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/79783d23-8a02-4af8-bc4b-8e8f74dae08e-srv-cert\") pod \"catalog-operator-68c6474976-kpb6p\" (UID: \"79783d23-8a02-4af8-bc4b-8e8f74dae08e\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-kpb6p" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.968402 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/79783d23-8a02-4af8-bc4b-8e8f74dae08e-profile-collector-cert\") pod \"catalog-operator-68c6474976-kpb6p\" (UID: \"79783d23-8a02-4af8-bc4b-8e8f74dae08e\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-kpb6p" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.968570 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/04b427fa-011a-4d5c-8844-48bb0c3f319e-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-grwvc\" (UID: \"04b427fa-011a-4d5c-8844-48bb0c3f319e\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-grwvc" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.968633 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e55788c5-581c-4013-8c44-83a0b5f74b0a-serving-cert\") pod \"apiserver-76f77b778f-7hxcj\" (UID: \"e55788c5-581c-4013-8c44-83a0b5f74b0a\") " pod="openshift-apiserver/apiserver-76f77b778f-7hxcj" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.970414 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1ba4ef14-c563-4903-b359-f80b487d8ced-serving-cert\") pod \"route-controller-manager-6576b87f9c-27s4z\" (UID: \"1ba4ef14-c563-4903-b359-f80b487d8ced\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-27s4z" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.970793 5070 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a54536a5-e3a5-40e5-8864-f799bdfff6ca-metrics-tls\") pod \"dns-default-n85jm\" (UID: \"a54536a5-e3a5-40e5-8864-f799bdfff6ca\") " pod="openshift-dns/dns-default-n85jm" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.972517 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/7eb9db2d-0c78-4df0-ad39-1b3d6bc75d82-webhook-cert\") pod \"packageserver-d55dfcdfc-wm2b8\" (UID: \"7eb9db2d-0c78-4df0-ad39-1b3d6bc75d82\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-wm2b8" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.974818 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/74e8be9f-8aac-4eaf-9d70-b5ffab6f9b54-cert\") pod \"ingress-canary-jgtrr\" (UID: \"74e8be9f-8aac-4eaf-9d70-b5ffab6f9b54\") " pod="openshift-ingress-canary/ingress-canary-jgtrr" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.975412 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/331fdd7d-1eb1-4a86-ace3-75fe6d2a50e0-metrics-tls\") pod \"ingress-operator-5b745b69d9-vx28k\" (UID: \"331fdd7d-1eb1-4a86-ace3-75fe6d2a50e0\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-vx28k" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.977419 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/b4996b72-90e0-46a0-a0e9-fed852729a89-signing-key\") pod \"service-ca-9c57cc56f-l9t5h\" (UID: \"b4996b72-90e0-46a0-a0e9-fed852729a89\") " pod="openshift-service-ca/service-ca-9c57cc56f-l9t5h" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.977486 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6234fc7c-52ec-4021-b04f-0264df7a6307-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-dlrd7\" (UID: \"6234fc7c-52ec-4021-b04f-0264df7a6307\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-dlrd7" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.979253 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5xfr5\" (UniqueName: \"kubernetes.io/projected/3dd269a0-64a2-4e74-91e2-7edbf5fd5574-kube-api-access-5xfr5\") pod \"cni-sysctl-allowlist-ds-9chqz\" (UID: \"3dd269a0-64a2-4e74-91e2-7edbf5fd5574\") " pod="openshift-multus/cni-sysctl-allowlist-ds-9chqz" Dec 13 03:12:36 crc kubenswrapper[5070]: I1213 03:12:36.984291 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/088b1a9f-bf10-4751-875e-092b9c149cfa-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-h8tb8\" (UID: \"088b1a9f-bf10-4751-875e-092b9c149cfa\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-h8tb8" Dec 13 03:12:37 crc kubenswrapper[5070]: I1213 03:12:37.017286 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-n4gjw" Dec 13 03:12:37 crc kubenswrapper[5070]: I1213 03:12:37.020261 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-htt95\" (UniqueName: \"kubernetes.io/projected/b4996b72-90e0-46a0-a0e9-fed852729a89-kube-api-access-htt95\") pod \"service-ca-9c57cc56f-l9t5h\" (UID: \"b4996b72-90e0-46a0-a0e9-fed852729a89\") " pod="openshift-service-ca/service-ca-9c57cc56f-l9t5h" Dec 13 03:12:37 crc kubenswrapper[5070]: I1213 03:12:37.029472 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k79b6\" (UniqueName: \"kubernetes.io/projected/d84fad97-769a-4f5d-8e19-d91d308675f6-kube-api-access-k79b6\") pod \"marketplace-operator-79b997595-vblq5\" (UID: \"d84fad97-769a-4f5d-8e19-d91d308675f6\") " pod="openshift-marketplace/marketplace-operator-79b997595-vblq5" Dec 13 03:12:37 crc kubenswrapper[5070]: I1213 03:12:37.043774 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 03:12:37 crc kubenswrapper[5070]: E1213 03:12:37.044213 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 03:12:37.544197304 +0000 UTC m=+49.780040850 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 03:12:37 crc kubenswrapper[5070]: I1213 03:12:37.054013 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-glgjt\" (UniqueName: \"kubernetes.io/projected/bbdcfa81-b48d-4067-af2e-0de54cea8c7e-kube-api-access-glgjt\") pod \"authentication-operator-69f744f599-k6vpl\" (UID: \"bbdcfa81-b48d-4067-af2e-0de54cea8c7e\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-k6vpl" Dec 13 03:12:37 crc kubenswrapper[5070]: I1213 03:12:37.064255 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8t4d7\" (UniqueName: \"kubernetes.io/projected/d0e149ba-a883-49ad-a21f-40b99873662b-kube-api-access-8t4d7\") pod \"csi-hostpathplugin-6fssx\" (UID: \"d0e149ba-a883-49ad-a21f-40b99873662b\") " pod="hostpath-provisioner/csi-hostpathplugin-6fssx" Dec 13 03:12:37 crc kubenswrapper[5070]: I1213 03:12:37.089962 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-68bpt\" (UniqueName: \"kubernetes.io/projected/088b1a9f-bf10-4751-875e-092b9c149cfa-kube-api-access-68bpt\") pod \"multus-admission-controller-857f4d67dd-h8tb8\" (UID: \"088b1a9f-bf10-4751-875e-092b9c149cfa\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-h8tb8" Dec 13 03:12:37 crc kubenswrapper[5070]: I1213 03:12:37.105659 5070 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-6m6ct\" (UniqueName: \"kubernetes.io/projected/e55788c5-581c-4013-8c44-83a0b5f74b0a-kube-api-access-6m6ct\") pod \"apiserver-76f77b778f-7hxcj\" (UID: \"e55788c5-581c-4013-8c44-83a0b5f74b0a\") " pod="openshift-apiserver/apiserver-76f77b778f-7hxcj" Dec 13 03:12:37 crc kubenswrapper[5070]: I1213 03:12:37.124016 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m5h54\" (UniqueName: \"kubernetes.io/projected/1ba4ef14-c563-4903-b359-f80b487d8ced-kube-api-access-m5h54\") pod \"route-controller-manager-6576b87f9c-27s4z\" (UID: \"1ba4ef14-c563-4903-b359-f80b487d8ced\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-27s4z" Dec 13 03:12:37 crc kubenswrapper[5070]: I1213 03:12:37.143656 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-vblq5" Dec 13 03:12:37 crc kubenswrapper[5070]: I1213 03:12:37.144896 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xkp8n\" (UID: \"472601ba-cee2-4f6e-ac53-a5606ef0469f\") " pod="openshift-image-registry/image-registry-697d97f7c8-xkp8n" Dec 13 03:12:37 crc kubenswrapper[5070]: E1213 03:12:37.145372 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 03:12:37.645357054 +0000 UTC m=+49.881200600 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xkp8n" (UID: "472601ba-cee2-4f6e-ac53-a5606ef0469f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 03:12:37 crc kubenswrapper[5070]: I1213 03:12:37.147347 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zgzzs\" (UniqueName: \"kubernetes.io/projected/a54536a5-e3a5-40e5-8864-f799bdfff6ca-kube-api-access-zgzzs\") pod \"dns-default-n85jm\" (UID: \"a54536a5-e3a5-40e5-8864-f799bdfff6ca\") " pod="openshift-dns/dns-default-n85jm" Dec 13 03:12:37 crc kubenswrapper[5070]: I1213 03:12:37.147603 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-l9t5h" Dec 13 03:12:37 crc kubenswrapper[5070]: I1213 03:12:37.155901 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-z6jvd"] Dec 13 03:12:37 crc kubenswrapper[5070]: I1213 03:12:37.167970 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pwnzg\" (UniqueName: \"kubernetes.io/projected/79783d23-8a02-4af8-bc4b-8e8f74dae08e-kube-api-access-pwnzg\") pod \"catalog-operator-68c6474976-kpb6p\" (UID: \"79783d23-8a02-4af8-bc4b-8e8f74dae08e\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-kpb6p" Dec 13 03:12:37 crc kubenswrapper[5070]: I1213 03:12:37.168248 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29426580-zzsjs" Dec 13 03:12:37 crc kubenswrapper[5070]: I1213 03:12:37.179696 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-7hxcj" Dec 13 03:12:37 crc kubenswrapper[5070]: I1213 03:12:37.179859 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-kpb6p" Dec 13 03:12:37 crc kubenswrapper[5070]: I1213 03:12:37.188363 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l2ms9\" (UniqueName: \"kubernetes.io/projected/331fdd7d-1eb1-4a86-ace3-75fe6d2a50e0-kube-api-access-l2ms9\") pod \"ingress-operator-5b745b69d9-vx28k\" (UID: \"331fdd7d-1eb1-4a86-ace3-75fe6d2a50e0\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-vx28k" Dec 13 03:12:37 crc kubenswrapper[5070]: I1213 03:12:37.199970 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-h8tb8" Dec 13 03:12:37 crc kubenswrapper[5070]: I1213 03:12:37.200555 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-n85jm" Dec 13 03:12:37 crc kubenswrapper[5070]: I1213 03:12:37.201125 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/cni-sysctl-allowlist-ds-9chqz" Dec 13 03:12:37 crc kubenswrapper[5070]: I1213 03:12:37.208378 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wbjzr\" (UniqueName: \"kubernetes.io/projected/44e54907-8f0d-4c3c-960c-e1e5bcad7523-kube-api-access-wbjzr\") pod \"machine-approver-56656f9798-5wtk8\" (UID: \"44e54907-8f0d-4c3c-960c-e1e5bcad7523\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-5wtk8" Dec 13 03:12:37 crc kubenswrapper[5070]: I1213 03:12:37.230428 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/849cc5eb-fe81-4755-a13e-56cdc7b4f248-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-fhh5m\" (UID: \"849cc5eb-fe81-4755-a13e-56cdc7b4f248\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-fhh5m" Dec 13 03:12:37 crc kubenswrapper[5070]: I1213 03:12:37.233568 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-dwkv2"] Dec 13 03:12:37 crc kubenswrapper[5070]: I1213 03:12:37.233708 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-qk5h4"] Dec 13 03:12:37 crc kubenswrapper[5070]: I1213 03:12:37.247769 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-6fssx" Dec 13 03:12:37 crc kubenswrapper[5070]: I1213 03:12:37.248659 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 03:12:37 crc kubenswrapper[5070]: E1213 03:12:37.248853 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 03:12:37.748836946 +0000 UTC m=+49.984680482 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 03:12:37 crc kubenswrapper[5070]: I1213 03:12:37.248979 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xkp8n\" (UID: \"472601ba-cee2-4f6e-ac53-a5606ef0469f\") " pod="openshift-image-registry/image-registry-697d97f7c8-xkp8n" Dec 13 03:12:37 crc kubenswrapper[5070]: I1213 03:12:37.249036 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-k6vpl" Dec 13 03:12:37 crc kubenswrapper[5070]: E1213 03:12:37.249245 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 03:12:37.749237776 +0000 UTC m=+49.985081322 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xkp8n" (UID: "472601ba-cee2-4f6e-ac53-a5606ef0469f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 03:12:37 crc kubenswrapper[5070]: I1213 03:12:37.269780 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-27s4z" Dec 13 03:12:37 crc kubenswrapper[5070]: I1213 03:12:37.272919 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-fmcfp"] Dec 13 03:12:37 crc kubenswrapper[5070]: I1213 03:12:37.289178 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-5wtk8" Dec 13 03:12:37 crc kubenswrapper[5070]: I1213 03:12:37.289883 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fjkrj\" (UniqueName: \"kubernetes.io/projected/5b047179-d09b-486d-8e47-2c110a710e51-kube-api-access-fjkrj\") pod \"openshift-controller-manager-operator-756b6f6bc6-m7blq\" (UID: \"5b047179-d09b-486d-8e47-2c110a710e51\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-m7blq" Dec 13 03:12:37 crc kubenswrapper[5070]: I1213 03:12:37.294078 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/331fdd7d-1eb1-4a86-ace3-75fe6d2a50e0-bound-sa-token\") pod \"ingress-operator-5b745b69d9-vx28k\" (UID: \"331fdd7d-1eb1-4a86-ace3-75fe6d2a50e0\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-vx28k" Dec 13 03:12:37 crc kubenswrapper[5070]: I1213 03:12:37.302270 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-vx28k" Dec 13 03:12:37 crc kubenswrapper[5070]: I1213 03:12:37.312553 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r4h4x\" (UniqueName: \"kubernetes.io/projected/6234fc7c-52ec-4021-b04f-0264df7a6307-kube-api-access-r4h4x\") pod \"control-plane-machine-set-operator-78cbb6b69f-dlrd7\" (UID: \"6234fc7c-52ec-4021-b04f-0264df7a6307\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-dlrd7" Dec 13 03:12:37 crc kubenswrapper[5070]: I1213 03:12:37.329369 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-fhh5m" Dec 13 03:12:37 crc kubenswrapper[5070]: I1213 03:12:37.331290 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ltgfq\" (UniqueName: \"kubernetes.io/projected/5dbe31ab-6e43-41c3-8f14-e1ac87938038-kube-api-access-ltgfq\") pod \"machine-config-controller-84d6567774-6td6p\" (UID: \"5dbe31ab-6e43-41c3-8f14-e1ac87938038\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-6td6p" Dec 13 03:12:37 crc kubenswrapper[5070]: I1213 03:12:37.335943 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-62gjl\" (UniqueName: \"kubernetes.io/projected/7eb9db2d-0c78-4df0-ad39-1b3d6bc75d82-kube-api-access-62gjl\") pod \"packageserver-d55dfcdfc-wm2b8\" (UID: \"7eb9db2d-0c78-4df0-ad39-1b3d6bc75d82\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-wm2b8" Dec 13 03:12:37 crc kubenswrapper[5070]: I1213 03:12:37.351266 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 03:12:37 crc kubenswrapper[5070]: E1213 03:12:37.351742 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 03:12:37.851720742 +0000 UTC m=+50.087564288 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 03:12:37 crc kubenswrapper[5070]: I1213 03:12:37.365153 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-6dxqs"] Dec 13 03:12:37 crc kubenswrapper[5070]: I1213 03:12:37.375680 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4q7lj\" (UniqueName: \"kubernetes.io/projected/994638f3-a23b-445a-b2d9-929361c0a5e3-kube-api-access-4q7lj\") pod \"dns-operator-744455d44c-2sbvn\" (UID: \"994638f3-a23b-445a-b2d9-929361c0a5e3\") " pod="openshift-dns-operator/dns-operator-744455d44c-2sbvn" Dec 13 03:12:37 crc kubenswrapper[5070]: I1213 03:12:37.387371 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mx678\" (UniqueName: \"kubernetes.io/projected/8cfebcb5-7935-4f04-8d87-40dce0dc5ef7-kube-api-access-mx678\") pod \"machine-config-server-hrb5f\" (UID: \"8cfebcb5-7935-4f04-8d87-40dce0dc5ef7\") " pod="openshift-machine-config-operator/machine-config-server-hrb5f" Dec 13 03:12:37 crc kubenswrapper[5070]: I1213 03:12:37.387382 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-5clz7"] Dec 13 03:12:37 crc kubenswrapper[5070]: I1213 03:12:37.398619 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x4vkp\" (UniqueName: \"kubernetes.io/projected/74e8be9f-8aac-4eaf-9d70-b5ffab6f9b54-kube-api-access-x4vkp\") pod \"ingress-canary-jgtrr\" (UID: \"74e8be9f-8aac-4eaf-9d70-b5ffab6f9b54\") " pod="openshift-ingress-canary/ingress-canary-jgtrr" Dec 13 03:12:37 crc kubenswrapper[5070]: I1213 03:12:37.403785 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-dvc5s"] Dec 13 03:12:37 crc kubenswrapper[5070]: I1213 03:12:37.406722 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-6td6p" Dec 13 03:12:37 crc kubenswrapper[5070]: I1213 03:12:37.417556 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a508f977-bce2-4269-ae18-1c655e9befd8-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-ckvrb\" (UID: \"a508f977-bce2-4269-ae18-1c655e9befd8\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-ckvrb" Dec 13 03:12:37 crc kubenswrapper[5070]: I1213 03:12:37.421650 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-wm2b8" Dec 13 03:12:37 crc kubenswrapper[5070]: I1213 03:12:37.430699 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bjdmv\" (UniqueName: \"kubernetes.io/projected/04b427fa-011a-4d5c-8844-48bb0c3f319e-kube-api-access-bjdmv\") pod \"package-server-manager-789f6589d5-grwvc\" (UID: \"04b427fa-011a-4d5c-8844-48bb0c3f319e\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-grwvc" Dec 13 03:12:37 crc kubenswrapper[5070]: I1213 03:12:37.439682 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-n4gjw"] Dec 13 03:12:37 crc kubenswrapper[5070]: I1213 03:12:37.452623 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xkp8n\" (UID: \"472601ba-cee2-4f6e-ac53-a5606ef0469f\") " pod="openshift-image-registry/image-registry-697d97f7c8-xkp8n" Dec 13 03:12:37 crc kubenswrapper[5070]: E1213 03:12:37.452974 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 03:12:37.952961785 +0000 UTC m=+50.188805321 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xkp8n" (UID: "472601ba-cee2-4f6e-ac53-a5606ef0469f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 03:12:37 crc kubenswrapper[5070]: I1213 03:12:37.455428 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-dlrd7" Dec 13 03:12:37 crc kubenswrapper[5070]: I1213 03:12:37.505722 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-jgtrr" Dec 13 03:12:37 crc kubenswrapper[5070]: I1213 03:12:37.510232 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rx5gc"] Dec 13 03:12:37 crc kubenswrapper[5070]: W1213 03:12:37.511109 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddf8e88fc_38e8_4748_aed5_7fb71bfdb0df.slice/crio-0cf6a02c846cde20b2265333fd193052701207ca9901536bba1670ca8e1faf7b WatchSource:0}: Error finding container 0cf6a02c846cde20b2265333fd193052701207ca9901536bba1670ca8e1faf7b: Status 404 returned error can't find the container with id 0cf6a02c846cde20b2265333fd193052701207ca9901536bba1670ca8e1faf7b Dec 13 03:12:37 crc kubenswrapper[5070]: I1213 03:12:37.514780 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-hrb5f" Dec 13 03:12:37 crc kubenswrapper[5070]: W1213 03:12:37.521288 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dd269a0_64a2_4e74_91e2_7edbf5fd5574.slice/crio-c6a723979946f050d29a6a019370c09270739aad6d70447e61e4298aaec014ab WatchSource:0}: Error finding container c6a723979946f050d29a6a019370c09270739aad6d70447e61e4298aaec014ab: Status 404 returned error can't find the container with id c6a723979946f050d29a6a019370c09270739aad6d70447e61e4298aaec014ab Dec 13 03:12:37 crc kubenswrapper[5070]: I1213 03:12:37.556176 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 03:12:37 crc kubenswrapper[5070]: E1213 03:12:37.557079 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 03:12:38.057055822 +0000 UTC m=+50.292899368 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 03:12:37 crc kubenswrapper[5070]: I1213 03:12:37.557241 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xkp8n\" (UID: \"472601ba-cee2-4f6e-ac53-a5606ef0469f\") " pod="openshift-image-registry/image-registry-697d97f7c8-xkp8n" Dec 13 03:12:37 crc kubenswrapper[5070]: E1213 03:12:37.557665 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 03:12:38.057648148 +0000 UTC m=+50.293491684 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xkp8n" (UID: "472601ba-cee2-4f6e-ac53-a5606ef0469f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 03:12:37 crc kubenswrapper[5070]: I1213 03:12:37.561862 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-g4gp9" event={"ID":"9ad1cf0d-be40-42ac-adf2-4fd3b3d40d6b","Type":"ContainerStarted","Data":"ee8322c00035dd74c521707c91521791d67b2bea4509d026e688fcb6568b6e3c"} Dec 13 03:12:37 crc kubenswrapper[5070]: I1213 03:12:37.563278 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-2sbvn" Dec 13 03:12:37 crc kubenswrapper[5070]: I1213 03:12:37.563745 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-fmcfp" event={"ID":"1a78b35d-1521-4a5a-9cb0-c73064d59f12","Type":"ContainerStarted","Data":"93c2277686b49e8164a9b7983e0cb9704f755e1a56bb26cdfff2ea6d65ebfaf6"} Dec 13 03:12:37 crc kubenswrapper[5070]: I1213 03:12:37.569380 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-6z6fd" event={"ID":"e96df34c-cff8-4655-9f8c-2f0baf4f772c","Type":"ContainerStarted","Data":"37a93ea13eff4c9c568366938c43740c7b80f561488d99145ad753fc5e98475c"} Dec 13 03:12:37 crc kubenswrapper[5070]: I1213 03:12:37.569421 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-6z6fd" event={"ID":"e96df34c-cff8-4655-9f8c-2f0baf4f772c","Type":"ContainerStarted","Data":"fd2b5b7197c108a84f94294d12084e18810d0fedbf75031b58e080bac86c1edc"} Dec 13 03:12:37 crc kubenswrapper[5070]: I1213 03:12:37.574040 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/cni-sysctl-allowlist-ds-9chqz" event={"ID":"3dd269a0-64a2-4e74-91e2-7edbf5fd5574","Type":"ContainerStarted","Data":"c6a723979946f050d29a6a019370c09270739aad6d70447e61e4298aaec014ab"} Dec 13 03:12:37 crc kubenswrapper[5070]: I1213 03:12:37.581544 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-z6jvd" event={"ID":"7dd4f70a-3e48-4dcd-8e65-a9ee2430dfc4","Type":"ContainerStarted","Data":"bb4175f0ff80d861a7326af9daada06b5b3e0c4fa4c548892966a1ced6fc4ba0"} Dec 13 03:12:37 crc kubenswrapper[5070]: I1213 03:12:37.582671 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-m7blq" Dec 13 03:12:37 crc kubenswrapper[5070]: I1213 03:12:37.589963 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-dwkv2" event={"ID":"e0009fee-6991-4819-ae3d-2d075aa961af","Type":"ContainerStarted","Data":"30cc032d356b726ab109a95efc70bedfbecea35f8004ce8257621adfc3a0f6f1"} Dec 13 03:12:37 crc kubenswrapper[5070]: I1213 03:12:37.590318 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-l9t5h"] Dec 13 03:12:37 crc kubenswrapper[5070]: I1213 03:12:37.591975 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-n4gjw" event={"ID":"df8e88fc-38e8-4748-aed5-7fb71bfdb0df","Type":"ContainerStarted","Data":"0cf6a02c846cde20b2265333fd193052701207ca9901536bba1670ca8e1faf7b"} Dec 13 03:12:37 crc kubenswrapper[5070]: I1213 03:12:37.593035 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-6dxqs" event={"ID":"725fea52-a164-4df9-9bbf-13cda9f52cb6","Type":"ContainerStarted","Data":"2c8360781c629414a552f7920785a6a743714384532ae1a2d9660958fd5edc94"} Dec 13 03:12:37 crc kubenswrapper[5070]: I1213 03:12:37.602472 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-s276q" event={"ID":"56416152-93a7-49cb-a1f0-01577eb6cadc","Type":"ContainerStarted","Data":"d5a4f7394055abc14992267dcb3d621aaad12d899e87130a5c6ea615ec98a686"} Dec 13 03:12:37 crc kubenswrapper[5070]: I1213 03:12:37.624269 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-dvc5s" event={"ID":"6dca72ea-876f-4b17-90ee-62f8287ea7d6","Type":"ContainerStarted","Data":"e9c9cf62f2790984b1439986203018725de6ef0cf75aa9eccf6a090c592e7415"} Dec 13 03:12:37 crc kubenswrapper[5070]: I1213 03:12:37.629530 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-5clz7" event={"ID":"47eff15d-d8a1-4f98-8579-eb8cdec723ff","Type":"ContainerStarted","Data":"98e5da74c7df117aa336ee011e3148bec4a4bcf01af61dfbcd4f6ce41eb6608f"} Dec 13 03:12:37 crc kubenswrapper[5070]: I1213 03:12:37.629589 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-rlmj4"] Dec 13 03:12:37 crc kubenswrapper[5070]: I1213 03:12:37.631391 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-bjz8x" event={"ID":"93fc381a-a5cc-4d02-bd2a-ba2898536d45","Type":"ContainerStarted","Data":"6b9c36b6ce0ec0bcac02fb353b800766c57fb9913f94aba3e935adaf8c7f2b65"} Dec 13 03:12:37 crc kubenswrapper[5070]: I1213 03:12:37.637229 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-ckvrb" Dec 13 03:12:37 crc kubenswrapper[5070]: I1213 03:12:37.640031 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-jj2l2" event={"ID":"3293f634-1926-4bb8-b639-44d6d14263cb","Type":"ContainerStarted","Data":"2a2349c84000f201df763ef11bbd7e4226f7ded24347c6663dc822719939b39e"} Dec 13 03:12:37 crc kubenswrapper[5070]: I1213 03:12:37.640087 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-jj2l2" event={"ID":"3293f634-1926-4bb8-b639-44d6d14263cb","Type":"ContainerStarted","Data":"d977e364a662f87d6ff79103c489ba675ef29e249bc9e55ad8d9fc0a45074890"} Dec 13 03:12:37 crc kubenswrapper[5070]: I1213 03:12:37.643769 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-z445m" event={"ID":"9829d6ca-fc22-4e71-a966-cff569f273fb","Type":"ContainerStarted","Data":"f37f3b2bd16bf0af87b95d3e36d786868b9a26a3f947499e82fe714ab385ce3b"} Dec 13 03:12:37 crc kubenswrapper[5070]: I1213 03:12:37.645523 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-qk5h4" event={"ID":"e26ec275-42a6-42af-a6ed-0872bd777acf","Type":"ContainerStarted","Data":"2bb2e9188d66aa3e89327e71b098c5b8ec6333eff88b88f4625c32dc3c18128a"} Dec 13 03:12:37 crc kubenswrapper[5070]: I1213 03:12:37.649156 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-z8556" event={"ID":"bf452261-a66d-45bd-9155-55aa347f086a","Type":"ContainerStarted","Data":"a95ab1fca0d8501fc50edcd66a5920f8e06dae7f973577105522bd36224895cb"} Dec 13 03:12:37 crc kubenswrapper[5070]: I1213 03:12:37.653160 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-4z9vz" event={"ID":"4896d2ca-5911-48fd-a2dd-89d8af9ddf3f","Type":"ContainerStarted","Data":"15087a7ba25b88313d9de7e0d42d3ff705c647717cf855456e397297e6edb15c"} Dec 13 03:12:37 crc kubenswrapper[5070]: I1213 03:12:37.653206 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-4z9vz" event={"ID":"4896d2ca-5911-48fd-a2dd-89d8af9ddf3f","Type":"ContainerStarted","Data":"84ae1d167aa7c2ccbe6bdae936fee945acaf21623fcdc3d9f0c5759cb8ee582f"} Dec 13 03:12:37 crc kubenswrapper[5070]: I1213 03:12:37.653825 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-vblq5"] Dec 13 03:12:37 crc kubenswrapper[5070]: I1213 03:12:37.654724 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-lrx9b" event={"ID":"9dffbe71-422b-480e-8bf0-e6b89f6daa88","Type":"ContainerStarted","Data":"510b24a2d046c6a42ede39d18910c47a43d3c273e4e412857330883c5ffeaa35"} Dec 13 03:12:37 crc kubenswrapper[5070]: I1213 03:12:37.654745 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-lrx9b" event={"ID":"9dffbe71-422b-480e-8bf0-e6b89f6daa88","Type":"ContainerStarted","Data":"587a0b9440f183646477f81a1cbf8347958b1ff2266e01ec65c609b42dbbe3e0"} Dec 13 03:12:37 crc kubenswrapper[5070]: I1213 03:12:37.658319 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 03:12:37 crc kubenswrapper[5070]: E1213 03:12:37.658671 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 03:12:38.158656194 +0000 UTC m=+50.394499740 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 03:12:37 crc kubenswrapper[5070]: I1213 03:12:37.724864 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-grwvc" Dec 13 03:12:37 crc kubenswrapper[5070]: I1213 03:12:37.753054 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-6fssx"] Dec 13 03:12:37 crc kubenswrapper[5070]: I1213 03:12:37.754427 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-n85jm"] Dec 13 03:12:37 crc kubenswrapper[5070]: I1213 03:12:37.760520 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xkp8n\" (UID: \"472601ba-cee2-4f6e-ac53-a5606ef0469f\") " pod="openshift-image-registry/image-registry-697d97f7c8-xkp8n" Dec 13 03:12:37 crc kubenswrapper[5070]: E1213 03:12:37.761722 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 03:12:38.261704514 +0000 UTC m=+50.497548060 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xkp8n" (UID: "472601ba-cee2-4f6e-ac53-a5606ef0469f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 03:12:37 crc kubenswrapper[5070]: I1213 03:12:37.803567 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-27s4z"] Dec 13 03:12:37 crc kubenswrapper[5070]: I1213 03:12:37.805323 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-7hxcj"] Dec 13 03:12:37 crc kubenswrapper[5070]: I1213 03:12:37.857865 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=19.857844393 podStartE2EDuration="19.857844393s" podCreationTimestamp="2025-12-13 03:12:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 03:12:37.85662138 +0000 UTC m=+50.092464926" watchObservedRunningTime="2025-12-13 03:12:37.857844393 +0000 UTC m=+50.093687939" Dec 13 03:12:37 crc kubenswrapper[5070]: I1213 03:12:37.861319 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 03:12:37 crc kubenswrapper[5070]: E1213 03:12:37.861479 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 03:12:38.361458618 +0000 UTC m=+50.597302184 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 03:12:37 crc kubenswrapper[5070]: I1213 03:12:37.861586 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xkp8n\" (UID: \"472601ba-cee2-4f6e-ac53-a5606ef0469f\") " pod="openshift-image-registry/image-registry-697d97f7c8-xkp8n" Dec 13 03:12:37 crc kubenswrapper[5070]: E1213 03:12:37.861897 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 03:12:38.361885919 +0000 UTC m=+50.597729465 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xkp8n" (UID: "472601ba-cee2-4f6e-ac53-a5606ef0469f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 03:12:37 crc kubenswrapper[5070]: W1213 03:12:37.888110 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda54536a5_e3a5_40e5_8864_f799bdfff6ca.slice/crio-46a24b89a2be712702ce86deae239c16b1a2649a9fc4f75bdcbf09d24d5e1e94 WatchSource:0}: Error finding container 46a24b89a2be712702ce86deae239c16b1a2649a9fc4f75bdcbf09d24d5e1e94: Status 404 returned error can't find the container with id 46a24b89a2be712702ce86deae239c16b1a2649a9fc4f75bdcbf09d24d5e1e94 Dec 13 03:12:37 crc kubenswrapper[5070]: W1213 03:12:37.888366 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb4996b72_90e0_46a0_a0e9_fed852729a89.slice/crio-3e68bf3fcd2c7b11ff834a4a2b4fb37d12f0a83c3841cd54e0480b7921f03c37 WatchSource:0}: Error finding container 3e68bf3fcd2c7b11ff834a4a2b4fb37d12f0a83c3841cd54e0480b7921f03c37: Status 404 returned error can't find the container with id 3e68bf3fcd2c7b11ff834a4a2b4fb37d12f0a83c3841cd54e0480b7921f03c37 Dec 13 03:12:37 crc kubenswrapper[5070]: I1213 03:12:37.962410 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 03:12:37 crc kubenswrapper[5070]: E1213 03:12:37.962667 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 03:12:38.462616388 +0000 UTC m=+50.698459934 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 03:12:37 crc kubenswrapper[5070]: I1213 03:12:37.963233 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xkp8n\" (UID: \"472601ba-cee2-4f6e-ac53-a5606ef0469f\") " pod="openshift-image-registry/image-registry-697d97f7c8-xkp8n" Dec 13 03:12:37 crc kubenswrapper[5070]: E1213 03:12:37.963743 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2025-12-13 03:12:38.463727067 +0000 UTC m=+50.699570613 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xkp8n" (UID: "472601ba-cee2-4f6e-ac53-a5606ef0469f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 03:12:38 crc kubenswrapper[5070]: I1213 03:12:38.008782 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-h8tb8"] Dec 13 03:12:38 crc kubenswrapper[5070]: I1213 03:12:38.046006 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-k6vpl"] Dec 13 03:12:38 crc kubenswrapper[5070]: I1213 03:12:38.066225 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 03:12:38 crc kubenswrapper[5070]: E1213 03:12:38.066953 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 03:12:38.566925952 +0000 UTC m=+50.802769508 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 03:12:38 crc kubenswrapper[5070]: I1213 03:12:38.082019 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/86e29ac2-28cb-457d-8f96-5a60c3d535e3-metrics-certs\") pod \"network-metrics-daemon-tmwbx\" (UID: \"86e29ac2-28cb-457d-8f96-5a60c3d535e3\") " pod="openshift-multus/network-metrics-daemon-tmwbx" Dec 13 03:12:38 crc kubenswrapper[5070]: I1213 03:12:38.082095 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xkp8n\" (UID: \"472601ba-cee2-4f6e-ac53-a5606ef0469f\") " pod="openshift-image-registry/image-registry-697d97f7c8-xkp8n" Dec 13 03:12:38 crc kubenswrapper[5070]: E1213 03:12:38.082403 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 03:12:38.582391121 +0000 UTC m=+50.818234667 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xkp8n" (UID: "472601ba-cee2-4f6e-ac53-a5606ef0469f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 03:12:38 crc kubenswrapper[5070]: I1213 03:12:38.100125 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/86e29ac2-28cb-457d-8f96-5a60c3d535e3-metrics-certs\") pod \"network-metrics-daemon-tmwbx\" (UID: \"86e29ac2-28cb-457d-8f96-5a60c3d535e3\") " pod="openshift-multus/network-metrics-daemon-tmwbx" Dec 13 03:12:38 crc kubenswrapper[5070]: W1213 03:12:38.138711 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbbdcfa81_b48d_4067_af2e_0de54cea8c7e.slice/crio-ec7c1e79ae7407fc11af702546f70b928c77d1d3625e2dd769272ed54d18f2a1 WatchSource:0}: Error finding container ec7c1e79ae7407fc11af702546f70b928c77d1d3625e2dd769272ed54d18f2a1: Status 404 returned error can't find the container with id ec7c1e79ae7407fc11af702546f70b928c77d1d3625e2dd769272ed54d18f2a1 Dec 13 03:12:38 crc kubenswrapper[5070]: I1213 03:12:38.187035 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 03:12:38 crc kubenswrapper[5070]: E1213 03:12:38.187718 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 03:12:38.68770426 +0000 UTC m=+50.923547806 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 03:12:38 crc kubenswrapper[5070]: I1213 03:12:38.189934 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-z8556" Dec 13 03:12:38 crc kubenswrapper[5070]: I1213 03:12:38.193205 5070 patch_prober.go:28] interesting pod/router-default-5444994796-z8556 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 13 03:12:38 crc kubenswrapper[5070]: [-]has-synced failed: reason withheld Dec 13 03:12:38 crc kubenswrapper[5070]: [+]process-running ok Dec 13 03:12:38 crc kubenswrapper[5070]: healthz check failed Dec 13 03:12:38 crc kubenswrapper[5070]: I1213 03:12:38.193252 5070 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-z8556" podUID="bf452261-a66d-45bd-9155-55aa347f086a" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 13 03:12:38 crc kubenswrapper[5070]: I1213 03:12:38.212429 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-tmwbx" Dec 13 03:12:38 crc kubenswrapper[5070]: I1213 03:12:38.261540 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-fhh5m"] Dec 13 03:12:38 crc kubenswrapper[5070]: I1213 03:12:38.289774 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xkp8n\" (UID: \"472601ba-cee2-4f6e-ac53-a5606ef0469f\") " pod="openshift-image-registry/image-registry-697d97f7c8-xkp8n" Dec 13 03:12:38 crc kubenswrapper[5070]: E1213 03:12:38.290176 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 03:12:38.790163635 +0000 UTC m=+51.026007171 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xkp8n" (UID: "472601ba-cee2-4f6e-ac53-a5606ef0469f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 03:12:38 crc kubenswrapper[5070]: I1213 03:12:38.333192 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-6td6p"] Dec 13 03:12:38 crc kubenswrapper[5070]: I1213 03:12:38.354826 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-vx28k"] Dec 13 03:12:38 crc kubenswrapper[5070]: I1213 03:12:38.395524 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-kpb6p"] Dec 13 03:12:38 crc kubenswrapper[5070]: I1213 03:12:38.397360 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 03:12:38 crc kubenswrapper[5070]: E1213 03:12:38.399992 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 03:12:38.899955593 +0000 UTC m=+51.135799149 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 03:12:38 crc kubenswrapper[5070]: I1213 03:12:38.403749 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xkp8n\" (UID: \"472601ba-cee2-4f6e-ac53-a5606ef0469f\") " pod="openshift-image-registry/image-registry-697d97f7c8-xkp8n" Dec 13 03:12:38 crc kubenswrapper[5070]: E1213 03:12:38.404172 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 03:12:38.904149844 +0000 UTC m=+51.139993580 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xkp8n" (UID: "472601ba-cee2-4f6e-ac53-a5606ef0469f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 03:12:38 crc kubenswrapper[5070]: I1213 03:12:38.418986 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29426580-zzsjs"] Dec 13 03:12:38 crc kubenswrapper[5070]: I1213 03:12:38.444967 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-wm2b8"] Dec 13 03:12:38 crc kubenswrapper[5070]: I1213 03:12:38.513461 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 03:12:38 crc kubenswrapper[5070]: W1213 03:12:38.532579 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod331fdd7d_1eb1_4a86_ace3_75fe6d2a50e0.slice/crio-d82a79b03ca14c726d54cf132710593cad54b75f24064a85ff602d21d704fad3 WatchSource:0}: Error finding container d82a79b03ca14c726d54cf132710593cad54b75f24064a85ff602d21d704fad3: Status 404 returned error can't find the container with id d82a79b03ca14c726d54cf132710593cad54b75f24064a85ff602d21d704fad3 Dec 13 03:12:38 crc kubenswrapper[5070]: E1213 03:12:38.536739 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 03:12:39.036695973 +0000 UTC m=+51.272539519 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 03:12:38 crc kubenswrapper[5070]: I1213 03:12:38.599205 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-dlrd7"] Dec 13 03:12:38 crc kubenswrapper[5070]: I1213 03:12:38.637996 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xkp8n\" (UID: \"472601ba-cee2-4f6e-ac53-a5606ef0469f\") " pod="openshift-image-registry/image-registry-697d97f7c8-xkp8n" Dec 13 03:12:38 crc kubenswrapper[5070]: E1213 03:12:38.638805 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2025-12-13 03:12:39.138771487 +0000 UTC m=+51.374615033 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xkp8n" (UID: "472601ba-cee2-4f6e-ac53-a5606ef0469f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 03:12:38 crc kubenswrapper[5070]: W1213 03:12:38.639605 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7eb9db2d_0c78_4df0_ad39_1b3d6bc75d82.slice/crio-1aa52c8cab78537d98327ad35a6f0576db582b5194ff650a7ed380c26cf49eea WatchSource:0}: Error finding container 1aa52c8cab78537d98327ad35a6f0576db582b5194ff650a7ed380c26cf49eea: Status 404 returned error can't find the container with id 1aa52c8cab78537d98327ad35a6f0576db582b5194ff650a7ed380c26cf49eea Dec 13 03:12:38 crc kubenswrapper[5070]: I1213 03:12:38.698814 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-wm2b8" event={"ID":"7eb9db2d-0c78-4df0-ad39-1b3d6bc75d82","Type":"ContainerStarted","Data":"1aa52c8cab78537d98327ad35a6f0576db582b5194ff650a7ed380c26cf49eea"} Dec 13 03:12:38 crc kubenswrapper[5070]: I1213 03:12:38.709521 5070 generic.go:334] "Generic (PLEG): container finished" podID="3293f634-1926-4bb8-b639-44d6d14263cb" containerID="2a2349c84000f201df763ef11bbd7e4226f7ded24347c6663dc822719939b39e" exitCode=0 Dec 13 03:12:38 crc kubenswrapper[5070]: I1213 03:12:38.709637 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-jj2l2" event={"ID":"3293f634-1926-4bb8-b639-44d6d14263cb","Type":"ContainerDied","Data":"2a2349c84000f201df763ef11bbd7e4226f7ded24347c6663dc822719939b39e"} Dec 13 03:12:38 crc kubenswrapper[5070]: I1213 03:12:38.716478 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-6fssx" event={"ID":"d0e149ba-a883-49ad-a21f-40b99873662b","Type":"ContainerStarted","Data":"9ba053a71ec7817e610b3dab87b7972fc802afbfe32b669238097008d7328a01"} Dec 13 03:12:38 crc kubenswrapper[5070]: I1213 03:12:38.732808 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-jgtrr"] Dec 13 03:12:38 crc kubenswrapper[5070]: I1213 03:12:38.739176 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 03:12:38 crc kubenswrapper[5070]: E1213 03:12:38.739509 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 03:12:39.239463685 +0000 UTC m=+51.475307241 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 03:12:38 crc kubenswrapper[5070]: I1213 03:12:38.739579 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xkp8n\" (UID: \"472601ba-cee2-4f6e-ac53-a5606ef0469f\") " pod="openshift-image-registry/image-registry-697d97f7c8-xkp8n" Dec 13 03:12:38 crc kubenswrapper[5070]: I1213 03:12:38.740256 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-rlmj4" event={"ID":"68d97f24-c17b-4120-be6e-48655b5efe88","Type":"ContainerStarted","Data":"7ffdf497bfec73f22ce857796a9799a788a802078ce10613c02bbc8d6461b1e9"} Dec 13 03:12:38 crc kubenswrapper[5070]: E1213 03:12:38.740393 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 03:12:39.24038041 +0000 UTC m=+51.476223956 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xkp8n" (UID: "472601ba-cee2-4f6e-ac53-a5606ef0469f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 03:12:38 crc kubenswrapper[5070]: I1213 03:12:38.748601 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29426580-zzsjs" event={"ID":"11d847a0-fe18-4f4a-8d65-b0f64b643c68","Type":"ContainerStarted","Data":"82374e85a92c06e0d4effc40a505245b1f37328321811e2921c4984b27ac2f87"} Dec 13 03:12:38 crc kubenswrapper[5070]: I1213 03:12:38.764136 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-2sbvn"] Dec 13 03:12:38 crc kubenswrapper[5070]: I1213 03:12:38.767418 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-7hxcj" event={"ID":"e55788c5-581c-4013-8c44-83a0b5f74b0a","Type":"ContainerStarted","Data":"3e063fa566f98ad20570e30f1bc109775ba7fde34116a587e9d914dc9d84f1a2"} Dec 13 03:12:38 crc kubenswrapper[5070]: W1213 03:12:38.773828 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6234fc7c_52ec_4021_b04f_0264df7a6307.slice/crio-8009858eccb397de48ba0d6b760ae7cd236b0a9bb4084a05f4d5f783fee02ae6 WatchSource:0}: Error finding container 8009858eccb397de48ba0d6b760ae7cd236b0a9bb4084a05f4d5f783fee02ae6: Status 404 returned error can't find the container with id 8009858eccb397de48ba0d6b760ae7cd236b0a9bb4084a05f4d5f783fee02ae6 Dec 13 03:12:38 crc kubenswrapper[5070]: I1213 03:12:38.814150 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-kpb6p" event={"ID":"79783d23-8a02-4af8-bc4b-8e8f74dae08e","Type":"ContainerStarted","Data":"e71b8cc487aff0f95f6f626f46695dfc8ea8a1f5f11675ae07a5a05a484020ab"} Dec 13 03:12:38 crc kubenswrapper[5070]: I1213 03:12:38.843104 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 03:12:38 crc kubenswrapper[5070]: E1213 03:12:38.843889 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 03:12:39.343855931 +0000 UTC m=+51.579699477 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 03:12:38 crc kubenswrapper[5070]: I1213 03:12:38.844384 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-ckvrb"] Dec 13 03:12:38 crc kubenswrapper[5070]: I1213 03:12:38.850722 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-27s4z" event={"ID":"1ba4ef14-c563-4903-b359-f80b487d8ced","Type":"ContainerStarted","Data":"574dbd0d94a157c547bbcb99e0ffeb1cdc0b7f9d3c781a7c08ab19be782831fc"} Dec 13 03:12:38 crc kubenswrapper[5070]: I1213 03:12:38.882170 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-grwvc"] Dec 13 03:12:38 crc kubenswrapper[5070]: I1213 03:12:38.929756 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-5wtk8" event={"ID":"44e54907-8f0d-4c3c-960c-e1e5bcad7523","Type":"ContainerStarted","Data":"4ac38ecd0398672ab2c4b773cf1f438821d3fe4d7d6594efe87c0aae8290efec"} Dec 13 03:12:38 crc kubenswrapper[5070]: I1213 03:12:38.950381 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-6dxqs" event={"ID":"725fea52-a164-4df9-9bbf-13cda9f52cb6","Type":"ContainerStarted","Data":"db06099f93b755af8d35f8673de999912580d84e9056a5a1babb31737c4108b8"} Dec 13 03:12:38 crc kubenswrapper[5070]: I1213 03:12:38.952878 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xkp8n\" (UID: \"472601ba-cee2-4f6e-ac53-a5606ef0469f\") " pod="openshift-image-registry/image-registry-697d97f7c8-xkp8n" Dec 13 03:12:38 crc kubenswrapper[5070]: E1213 03:12:38.953743 5070 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 03:12:39.453725582 +0000 UTC m=+51.689569128 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xkp8n" (UID: "472601ba-cee2-4f6e-ac53-a5606ef0469f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 03:12:38 crc kubenswrapper[5070]: I1213 03:12:38.954402 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-hrb5f" event={"ID":"8cfebcb5-7935-4f04-8d87-40dce0dc5ef7","Type":"ContainerStarted","Data":"23372c85d5ad867d4ff8977dc11a554bf5432f3831c243e53c376b8afb42dfcc"} Dec 13 03:12:38 crc kubenswrapper[5070]: I1213 03:12:38.990020 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-qk5h4" event={"ID":"e26ec275-42a6-42af-a6ed-0872bd777acf","Type":"ContainerStarted","Data":"5bf4aafb16a3734e016df6f6f0881707b3c56144b647ddf961cdd3f099d61dec"} Dec 13 03:12:38 crc kubenswrapper[5070]: I1213 03:12:38.990901 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-qk5h4" Dec 13 03:12:38 crc kubenswrapper[5070]: I1213 03:12:38.994081 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-m7blq"] Dec 13 03:12:39 crc kubenswrapper[5070]: I1213 03:12:39.014555 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-k6vpl" event={"ID":"bbdcfa81-b48d-4067-af2e-0de54cea8c7e","Type":"ContainerStarted","Data":"ec7c1e79ae7407fc11af702546f70b928c77d1d3625e2dd769272ed54d18f2a1"} Dec 13 03:12:39 crc kubenswrapper[5070]: I1213 03:12:39.020318 5070 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-qk5h4 container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.15:8443/healthz\": dial tcp 10.217.0.15:8443: connect: connection refused" start-of-body= Dec 13 03:12:39 crc kubenswrapper[5070]: I1213 03:12:39.020395 5070 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-qk5h4" podUID="e26ec275-42a6-42af-a6ed-0872bd777acf" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.15:8443/healthz\": dial tcp 10.217.0.15:8443: connect: connection refused" Dec 13 03:12:39 crc kubenswrapper[5070]: I1213 03:12:39.020501 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-l9t5h" event={"ID":"b4996b72-90e0-46a0-a0e9-fed852729a89","Type":"ContainerStarted","Data":"3e68bf3fcd2c7b11ff834a4a2b4fb37d12f0a83c3841cd54e0480b7921f03c37"} Dec 13 03:12:39 crc kubenswrapper[5070]: I1213 03:12:39.024955 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-h8tb8" 
event={"ID":"088b1a9f-bf10-4751-875e-092b9c149cfa","Type":"ContainerStarted","Data":"dab886cffc2d9c163bbe5c9578e862ec51f4b1201caf84fe3edb6c6530df65f7"} Dec 13 03:12:39 crc kubenswrapper[5070]: I1213 03:12:39.030719 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-z6jvd" event={"ID":"7dd4f70a-3e48-4dcd-8e65-a9ee2430dfc4","Type":"ContainerStarted","Data":"a003fda99e7b1776078b1e821d83e616b36f25462327fb6e4aa2ddf50ac231b7"} Dec 13 03:12:39 crc kubenswrapper[5070]: I1213 03:12:39.035809 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-bjz8x" event={"ID":"93fc381a-a5cc-4d02-bd2a-ba2898536d45","Type":"ContainerStarted","Data":"8b2031a58e3cd6a3bcc7e83c1fffe1e197dc7bf3f3078ad036c0360312771958"} Dec 13 03:12:39 crc kubenswrapper[5070]: I1213 03:12:39.036380 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-bjz8x" Dec 13 03:12:39 crc kubenswrapper[5070]: I1213 03:12:39.038345 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-fhh5m" event={"ID":"849cc5eb-fe81-4755-a13e-56cdc7b4f248","Type":"ContainerStarted","Data":"5fea2b81c3fc1207f285ae8f522f4ea47132bcf1e4db3ad8d2425ddb86671107"} Dec 13 03:12:39 crc kubenswrapper[5070]: I1213 03:12:39.046016 5070 patch_prober.go:28] interesting pod/downloads-7954f5f757-bjz8x container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.12:8080/\": dial tcp 10.217.0.12:8080: connect: connection refused" start-of-body= Dec 13 03:12:39 crc kubenswrapper[5070]: I1213 03:12:39.046081 5070 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-bjz8x" podUID="93fc381a-a5cc-4d02-bd2a-ba2898536d45" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.12:8080/\": dial tcp 10.217.0.12:8080: connect: connection refused" Dec 13 03:12:39 crc kubenswrapper[5070]: I1213 03:12:39.055609 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 03:12:39 crc kubenswrapper[5070]: E1213 03:12:39.057829 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 03:12:39.557798939 +0000 UTC m=+51.793642485 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 03:12:39 crc kubenswrapper[5070]: I1213 03:12:39.058378 5070 generic.go:334] "Generic (PLEG): container finished" podID="56416152-93a7-49cb-a1f0-01577eb6cadc" containerID="500eeac0a751504a44cf31f5fe2b73cf010ca4b585162b0d74c3c0ffa6c4d137" exitCode=0 Dec 13 03:12:39 crc kubenswrapper[5070]: I1213 03:12:39.058580 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-s276q" event={"ID":"56416152-93a7-49cb-a1f0-01577eb6cadc","Type":"ContainerDied","Data":"500eeac0a751504a44cf31f5fe2b73cf010ca4b585162b0d74c3c0ffa6c4d137"} Dec 13 03:12:39 crc kubenswrapper[5070]: I1213 03:12:39.065837 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-g4gp9" event={"ID":"9ad1cf0d-be40-42ac-adf2-4fd3b3d40d6b","Type":"ContainerStarted","Data":"ad786c787ab963cc0e5c31396bbf28c590980be4afb40524ebf4d04a854dbfe7"} Dec 13 03:12:39 crc kubenswrapper[5070]: I1213 03:12:39.066377 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-g4gp9" Dec 13 03:12:39 crc kubenswrapper[5070]: I1213 03:12:39.072414 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-vx28k" event={"ID":"331fdd7d-1eb1-4a86-ace3-75fe6d2a50e0","Type":"ContainerStarted","Data":"d82a79b03ca14c726d54cf132710593cad54b75f24064a85ff602d21d704fad3"} Dec 13 03:12:39 crc kubenswrapper[5070]: I1213 03:12:39.079168 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-n85jm" event={"ID":"a54536a5-e3a5-40e5-8864-f799bdfff6ca","Type":"ContainerStarted","Data":"46a24b89a2be712702ce86deae239c16b1a2649a9fc4f75bdcbf09d24d5e1e94"} Dec 13 03:12:39 crc kubenswrapper[5070]: I1213 03:12:39.081147 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-vblq5" event={"ID":"d84fad97-769a-4f5d-8e19-d91d308675f6","Type":"ContainerStarted","Data":"5c04893236b6f556e708ee8202dade3fe2b4d733c8358902ed075733d11f790d"} Dec 13 03:12:39 crc kubenswrapper[5070]: I1213 03:12:39.089054 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rx5gc" event={"ID":"3cd4a611-b694-4ee2-9290-b62ad8854af3","Type":"ContainerStarted","Data":"8d50ec2b8cbf5ee861a735f05d843d6d28bef148ee8e9e2e0d0418df4788cffe"} Dec 13 03:12:39 crc kubenswrapper[5070]: I1213 03:12:39.092677 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-g4gp9" Dec 13 03:12:39 crc kubenswrapper[5070]: W1213 03:12:39.101673 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod04b427fa_011a_4d5c_8844_48bb0c3f319e.slice/crio-6ba46108994f333c2eaed8b7318ba3affc6fa6b62017bc9882e64e3ff5de774e WatchSource:0}: Error finding container 6ba46108994f333c2eaed8b7318ba3affc6fa6b62017bc9882e64e3ff5de774e: Status 404 returned error can't find the 
container with id 6ba46108994f333c2eaed8b7318ba3affc6fa6b62017bc9882e64e3ff5de774e Dec 13 03:12:39 crc kubenswrapper[5070]: I1213 03:12:39.105842 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-6td6p" event={"ID":"5dbe31ab-6e43-41c3-8f14-e1ac87938038","Type":"ContainerStarted","Data":"625cd8c5eb256cb9eaf3e92302a963b41f00ed4c8ddf4ce13f21d29e6f04cdcb"} Dec 13 03:12:39 crc kubenswrapper[5070]: I1213 03:12:39.133862 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-tmwbx"] Dec 13 03:12:39 crc kubenswrapper[5070]: I1213 03:12:39.159117 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xkp8n\" (UID: \"472601ba-cee2-4f6e-ac53-a5606ef0469f\") " pod="openshift-image-registry/image-registry-697d97f7c8-xkp8n" Dec 13 03:12:39 crc kubenswrapper[5070]: E1213 03:12:39.164856 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 03:12:39.664840514 +0000 UTC m=+51.900684070 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xkp8n" (UID: "472601ba-cee2-4f6e-ac53-a5606ef0469f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 03:12:39 crc kubenswrapper[5070]: I1213 03:12:39.243073 5070 patch_prober.go:28] interesting pod/router-default-5444994796-z8556 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 13 03:12:39 crc kubenswrapper[5070]: [-]has-synced failed: reason withheld Dec 13 03:12:39 crc kubenswrapper[5070]: [+]process-running ok Dec 13 03:12:39 crc kubenswrapper[5070]: healthz check failed Dec 13 03:12:39 crc kubenswrapper[5070]: I1213 03:12:39.243583 5070 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-z8556" podUID="bf452261-a66d-45bd-9155-55aa347f086a" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 13 03:12:39 crc kubenswrapper[5070]: I1213 03:12:39.259859 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 03:12:39 crc kubenswrapper[5070]: E1213 03:12:39.262820 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 03:12:39.76277619 +0000 UTC m=+51.998619736 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 03:12:39 crc kubenswrapper[5070]: I1213 03:12:39.262964 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xkp8n\" (UID: \"472601ba-cee2-4f6e-ac53-a5606ef0469f\") " pod="openshift-image-registry/image-registry-697d97f7c8-xkp8n" Dec 13 03:12:39 crc kubenswrapper[5070]: E1213 03:12:39.265867 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 03:12:39.765853551 +0000 UTC m=+52.001697097 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xkp8n" (UID: "472601ba-cee2-4f6e-ac53-a5606ef0469f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 03:12:39 crc kubenswrapper[5070]: I1213 03:12:39.268554 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-z8556" podStartSLOduration=18.268528311 podStartE2EDuration="18.268528311s" podCreationTimestamp="2025-12-13 03:12:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 03:12:39.266908729 +0000 UTC m=+51.502752275" watchObservedRunningTime="2025-12-13 03:12:39.268528311 +0000 UTC m=+51.504371857" Dec 13 03:12:39 crc kubenswrapper[5070]: I1213 03:12:39.342369 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-lrx9b" podStartSLOduration=18.342335969 podStartE2EDuration="18.342335969s" podCreationTimestamp="2025-12-13 03:12:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 03:12:39.292121735 +0000 UTC m=+51.527965291" watchObservedRunningTime="2025-12-13 03:12:39.342335969 +0000 UTC m=+51.578179515" Dec 13 03:12:39 crc kubenswrapper[5070]: I1213 03:12:39.348478 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-z6jvd" podStartSLOduration=18.348464332 podStartE2EDuration="18.348464332s" podCreationTimestamp="2025-12-13 03:12:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 03:12:39.341384705 +0000 UTC m=+51.577228251" watchObservedRunningTime="2025-12-13 03:12:39.348464332 +0000 UTC m=+51.584307878" Dec 13 03:12:39 crc kubenswrapper[5070]: I1213 03:12:39.363958 5070 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 03:12:39 crc kubenswrapper[5070]: E1213 03:12:39.364498 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 03:12:39.864482494 +0000 UTC m=+52.100326040 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 03:12:39 crc kubenswrapper[5070]: I1213 03:12:39.376942 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-6dxqs" podStartSLOduration=18.376927793 podStartE2EDuration="18.376927793s" podCreationTimestamp="2025-12-13 03:12:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 03:12:39.375101185 +0000 UTC m=+51.610944731" watchObservedRunningTime="2025-12-13 03:12:39.376927793 +0000 UTC m=+51.612771339" Dec 13 03:12:39 crc kubenswrapper[5070]: I1213 03:12:39.404663 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-bjz8x" podStartSLOduration=18.404647335 podStartE2EDuration="18.404647335s" podCreationTimestamp="2025-12-13 03:12:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 03:12:39.401636736 +0000 UTC m=+51.637480282" watchObservedRunningTime="2025-12-13 03:12:39.404647335 +0000 UTC m=+51.640490881" Dec 13 03:12:39 crc kubenswrapper[5070]: I1213 03:12:39.467389 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xkp8n\" (UID: \"472601ba-cee2-4f6e-ac53-a5606ef0469f\") " pod="openshift-image-registry/image-registry-697d97f7c8-xkp8n" Dec 13 03:12:39 crc kubenswrapper[5070]: E1213 03:12:39.467815 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 03:12:39.967801962 +0000 UTC m=+52.203645508 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xkp8n" (UID: "472601ba-cee2-4f6e-ac53-a5606ef0469f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 03:12:39 crc kubenswrapper[5070]: I1213 03:12:39.489779 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-4z9vz" podStartSLOduration=18.489747161 podStartE2EDuration="18.489747161s" podCreationTimestamp="2025-12-13 03:12:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 03:12:39.485386386 +0000 UTC m=+51.721229932" watchObservedRunningTime="2025-12-13 03:12:39.489747161 +0000 UTC m=+51.725590707" Dec 13 03:12:39 crc kubenswrapper[5070]: I1213 03:12:39.535050 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-g4gp9" podStartSLOduration=18.535024806 podStartE2EDuration="18.535024806s" podCreationTimestamp="2025-12-13 03:12:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 03:12:39.532187541 +0000 UTC m=+51.768031087" watchObservedRunningTime="2025-12-13 03:12:39.535024806 +0000 UTC m=+51.770868352" Dec 13 03:12:39 crc kubenswrapper[5070]: I1213 03:12:39.568472 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 03:12:39 crc kubenswrapper[5070]: I1213 03:12:39.568708 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 13 03:12:39 crc kubenswrapper[5070]: E1213 03:12:39.569038 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 03:12:40.069020893 +0000 UTC m=+52.304864439 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 03:12:39 crc kubenswrapper[5070]: I1213 03:12:39.602880 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-qk5h4" podStartSLOduration=18.602857816 podStartE2EDuration="18.602857816s" podCreationTimestamp="2025-12-13 03:12:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 03:12:39.573967134 +0000 UTC m=+51.809810670" watchObservedRunningTime="2025-12-13 03:12:39.602857816 +0000 UTC m=+51.838701362" Dec 13 03:12:39 crc kubenswrapper[5070]: I1213 03:12:39.619648 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Dec 13 03:12:39 crc kubenswrapper[5070]: I1213 03:12:39.672128 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xkp8n\" (UID: \"472601ba-cee2-4f6e-ac53-a5606ef0469f\") " pod="openshift-image-registry/image-registry-697d97f7c8-xkp8n" Dec 13 03:12:39 crc kubenswrapper[5070]: E1213 03:12:39.672611 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 03:12:40.172595558 +0000 UTC m=+52.408439104 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xkp8n" (UID: "472601ba-cee2-4f6e-ac53-a5606ef0469f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 03:12:39 crc kubenswrapper[5070]: I1213 03:12:39.773744 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 03:12:39 crc kubenswrapper[5070]: E1213 03:12:39.774022 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 03:12:40.273994995 +0000 UTC m=+52.509838541 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 03:12:39 crc kubenswrapper[5070]: I1213 03:12:39.774186 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xkp8n\" (UID: \"472601ba-cee2-4f6e-ac53-a5606ef0469f\") " pod="openshift-image-registry/image-registry-697d97f7c8-xkp8n" Dec 13 03:12:39 crc kubenswrapper[5070]: E1213 03:12:39.774610 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 03:12:40.274603771 +0000 UTC m=+52.510447317 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xkp8n" (UID: "472601ba-cee2-4f6e-ac53-a5606ef0469f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 03:12:39 crc kubenswrapper[5070]: I1213 03:12:39.876047 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 03:12:39 crc kubenswrapper[5070]: E1213 03:12:39.876773 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 03:12:40.376757747 +0000 UTC m=+52.612601293 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 03:12:39 crc kubenswrapper[5070]: I1213 03:12:39.982110 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xkp8n\" (UID: \"472601ba-cee2-4f6e-ac53-a5606ef0469f\") " pod="openshift-image-registry/image-registry-697d97f7c8-xkp8n" Dec 13 03:12:39 crc kubenswrapper[5070]: E1213 03:12:39.982503 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 03:12:40.482492219 +0000 UTC m=+52.718335765 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xkp8n" (UID: "472601ba-cee2-4f6e-ac53-a5606ef0469f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 03:12:40 crc kubenswrapper[5070]: I1213 03:12:40.085958 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 03:12:40 crc kubenswrapper[5070]: E1213 03:12:40.086265 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 03:12:40.586235237 +0000 UTC m=+52.822078783 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 03:12:40 crc kubenswrapper[5070]: I1213 03:12:40.086705 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xkp8n\" (UID: \"472601ba-cee2-4f6e-ac53-a5606ef0469f\") " pod="openshift-image-registry/image-registry-697d97f7c8-xkp8n" Dec 13 03:12:40 crc kubenswrapper[5070]: E1213 03:12:40.087025 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 03:12:40.587012187 +0000 UTC m=+52.822855733 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xkp8n" (UID: "472601ba-cee2-4f6e-ac53-a5606ef0469f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 03:12:40 crc kubenswrapper[5070]: I1213 03:12:40.137215 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-l9t5h" event={"ID":"b4996b72-90e0-46a0-a0e9-fed852729a89","Type":"ContainerStarted","Data":"fca00a2d27501efe897ba375825d714cb10662d8c2f078be0e23d1fc8941dd45"} Dec 13 03:12:40 crc kubenswrapper[5070]: I1213 03:12:40.139680 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/cni-sysctl-allowlist-ds-9chqz" event={"ID":"3dd269a0-64a2-4e74-91e2-7edbf5fd5574","Type":"ContainerStarted","Data":"32f6ad13f5dda53ad7d2730403c2f5db6b8d345d1b69e68d26a7f536813b8595"} Dec 13 03:12:40 crc kubenswrapper[5070]: I1213 03:12:40.140109 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-multus/cni-sysctl-allowlist-ds-9chqz" Dec 13 03:12:40 crc kubenswrapper[5070]: I1213 03:12:40.191785 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 03:12:40 crc kubenswrapper[5070]: E1213 03:12:40.193026 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 03:12:40.692932924 +0000 UTC m=+52.928776470 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 03:12:40 crc kubenswrapper[5070]: I1213 03:12:40.193349 5070 patch_prober.go:28] interesting pod/router-default-5444994796-z8556 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 13 03:12:40 crc kubenswrapper[5070]: [-]has-synced failed: reason withheld Dec 13 03:12:40 crc kubenswrapper[5070]: [+]process-running ok Dec 13 03:12:40 crc kubenswrapper[5070]: healthz check failed Dec 13 03:12:40 crc kubenswrapper[5070]: I1213 03:12:40.193416 5070 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-z8556" podUID="bf452261-a66d-45bd-9155-55aa347f086a" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 13 03:12:40 crc kubenswrapper[5070]: I1213 03:12:40.210860 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-jgtrr" event={"ID":"74e8be9f-8aac-4eaf-9d70-b5ffab6f9b54","Type":"ContainerStarted","Data":"882fa519b3490e1ec74dde68bc94aabc4249a5449cce6603c2c20e09465ad22e"} Dec 13 03:12:40 crc kubenswrapper[5070]: I1213 03:12:40.212701 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=1.212684075 podStartE2EDuration="1.212684075s" podCreationTimestamp="2025-12-13 03:12:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 03:12:40.210069936 +0000 UTC m=+52.445913512" watchObservedRunningTime="2025-12-13 03:12:40.212684075 +0000 UTC m=+52.448527621" Dec 13 03:12:40 crc kubenswrapper[5070]: I1213 03:12:40.244617 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-l9t5h" podStartSLOduration=19.244595668 podStartE2EDuration="19.244595668s" podCreationTimestamp="2025-12-13 03:12:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 03:12:40.241549667 +0000 UTC m=+52.477393233" watchObservedRunningTime="2025-12-13 03:12:40.244595668 +0000 UTC m=+52.480439214" Dec 13 03:12:40 crc kubenswrapper[5070]: I1213 03:12:40.296321 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xkp8n\" (UID: \"472601ba-cee2-4f6e-ac53-a5606ef0469f\") " pod="openshift-image-registry/image-registry-697d97f7c8-xkp8n" Dec 13 03:12:40 crc kubenswrapper[5070]: E1213 03:12:40.296674 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2025-12-13 03:12:40.796659582 +0000 UTC m=+53.032503128 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xkp8n" (UID: "472601ba-cee2-4f6e-ac53-a5606ef0469f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 03:12:40 crc kubenswrapper[5070]: I1213 03:12:40.338114 5070 generic.go:334] "Generic (PLEG): container finished" podID="e55788c5-581c-4013-8c44-83a0b5f74b0a" containerID="f926650e03142546d680d913da399890e5460c701030864aec20e3d79724564c" exitCode=0 Dec 13 03:12:40 crc kubenswrapper[5070]: I1213 03:12:40.338228 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-7hxcj" event={"ID":"e55788c5-581c-4013-8c44-83a0b5f74b0a","Type":"ContainerDied","Data":"f926650e03142546d680d913da399890e5460c701030864aec20e3d79724564c"} Dec 13 03:12:40 crc kubenswrapper[5070]: I1213 03:12:40.389574 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/cni-sysctl-allowlist-ds-9chqz" podStartSLOduration=7.389554334 podStartE2EDuration="7.389554334s" podCreationTimestamp="2025-12-13 03:12:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 03:12:40.309983104 +0000 UTC m=+52.545826650" watchObservedRunningTime="2025-12-13 03:12:40.389554334 +0000 UTC m=+52.625397870" Dec 13 03:12:40 crc kubenswrapper[5070]: I1213 03:12:40.405056 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 03:12:40 crc kubenswrapper[5070]: E1213 03:12:40.407759 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 03:12:40.907723664 +0000 UTC m=+53.143567210 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 03:12:40 crc kubenswrapper[5070]: I1213 03:12:40.411622 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xkp8n\" (UID: \"472601ba-cee2-4f6e-ac53-a5606ef0469f\") " pod="openshift-image-registry/image-registry-697d97f7c8-xkp8n" Dec 13 03:12:40 crc kubenswrapper[5070]: E1213 03:12:40.412102 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 03:12:40.912089579 +0000 UTC m=+53.147933115 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xkp8n" (UID: "472601ba-cee2-4f6e-ac53-a5606ef0469f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 03:12:40 crc kubenswrapper[5070]: I1213 03:12:40.446678 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-h8tb8" event={"ID":"088b1a9f-bf10-4751-875e-092b9c149cfa","Type":"ContainerStarted","Data":"ecb62431fe7fbe6889c60b6f1f9276736730b7b730530cc466fb5ebd90a0b27a"} Dec 13 03:12:40 crc kubenswrapper[5070]: I1213 03:12:40.449103 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-multus/cni-sysctl-allowlist-ds-9chqz" Dec 13 03:12:40 crc kubenswrapper[5070]: I1213 03:12:40.487751 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-2sbvn" event={"ID":"994638f3-a23b-445a-b2d9-929361c0a5e3","Type":"ContainerStarted","Data":"555bca5fd21f2ad3e26bf3a1a2d0e04928ca7646d9a0c333a606407c8bb7b1fb"} Dec 13 03:12:40 crc kubenswrapper[5070]: I1213 03:12:40.514914 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 03:12:40 crc kubenswrapper[5070]: E1213 03:12:40.516049 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 03:12:41.016027442 +0000 UTC m=+53.251870988 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 03:12:40 crc kubenswrapper[5070]: I1213 03:12:40.602588 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-5clz7" event={"ID":"47eff15d-d8a1-4f98-8579-eb8cdec723ff","Type":"ContainerStarted","Data":"c4b07edbd18128722adb68452315b0a9dfcbd4133e818ceea29b96d32b411d97"} Dec 13 03:12:40 crc kubenswrapper[5070]: I1213 03:12:40.618675 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xkp8n\" (UID: \"472601ba-cee2-4f6e-ac53-a5606ef0469f\") " pod="openshift-image-registry/image-registry-697d97f7c8-xkp8n" Dec 13 03:12:40 crc kubenswrapper[5070]: E1213 03:12:40.619039 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 03:12:41.119027541 +0000 UTC m=+53.354871087 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xkp8n" (UID: "472601ba-cee2-4f6e-ac53-a5606ef0469f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 03:12:40 crc kubenswrapper[5070]: I1213 03:12:40.702918 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-fmcfp" event={"ID":"1a78b35d-1521-4a5a-9cb0-c73064d59f12","Type":"ContainerStarted","Data":"b9bb56303fa315f944e7735f34fd915bb0cf136b7648e86d0a0b3bb546ae1391"} Dec 13 03:12:40 crc kubenswrapper[5070]: I1213 03:12:40.719955 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 03:12:40 crc kubenswrapper[5070]: E1213 03:12:40.724159 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 03:12:41.224126566 +0000 UTC m=+53.459970112 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 03:12:40 crc kubenswrapper[5070]: I1213 03:12:40.742153 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-5clz7" podStartSLOduration=19.742133691 podStartE2EDuration="19.742133691s" podCreationTimestamp="2025-12-13 03:12:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 03:12:40.669646228 +0000 UTC m=+52.905489774" watchObservedRunningTime="2025-12-13 03:12:40.742133691 +0000 UTC m=+52.977977227" Dec 13 03:12:40 crc kubenswrapper[5070]: I1213 03:12:40.790379 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-fmcfp" podStartSLOduration=19.790357574 podStartE2EDuration="19.790357574s" podCreationTimestamp="2025-12-13 03:12:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 03:12:40.741880615 +0000 UTC m=+52.977724161" watchObservedRunningTime="2025-12-13 03:12:40.790357574 +0000 UTC m=+53.026201120" Dec 13 03:12:40 crc kubenswrapper[5070]: I1213 03:12:40.803203 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-multus/cni-sysctl-allowlist-ds-9chqz"] Dec 13 03:12:40 crc kubenswrapper[5070]: I1213 03:12:40.810825 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-27s4z" event={"ID":"1ba4ef14-c563-4903-b359-f80b487d8ced","Type":"ContainerStarted","Data":"12ee602fd25ff6548928b7d467399f3a82186c392108eaf09365d2d206aff2a0"} Dec 13 03:12:40 crc kubenswrapper[5070]: I1213 03:12:40.812151 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-27s4z" Dec 13 03:12:40 crc kubenswrapper[5070]: I1213 03:12:40.821340 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xkp8n\" (UID: \"472601ba-cee2-4f6e-ac53-a5606ef0469f\") " pod="openshift-image-registry/image-registry-697d97f7c8-xkp8n" Dec 13 03:12:40 crc kubenswrapper[5070]: E1213 03:12:40.821826 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 03:12:41.321805845 +0000 UTC m=+53.557649391 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xkp8n" (UID: "472601ba-cee2-4f6e-ac53-a5606ef0469f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 03:12:40 crc kubenswrapper[5070]: I1213 03:12:40.841897 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-vblq5" event={"ID":"d84fad97-769a-4f5d-8e19-d91d308675f6","Type":"ContainerStarted","Data":"ffa26983f13e36b661fade44743a4771d311e4f13e25d9ad6e235d19ded6eb15"} Dec 13 03:12:40 crc kubenswrapper[5070]: I1213 03:12:40.842273 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-vblq5" Dec 13 03:12:40 crc kubenswrapper[5070]: I1213 03:12:40.852603 5070 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-vblq5 container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.35:8080/healthz\": dial tcp 10.217.0.35:8080: connect: connection refused" start-of-body= Dec 13 03:12:40 crc kubenswrapper[5070]: I1213 03:12:40.852664 5070 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-vblq5" podUID="d84fad97-769a-4f5d-8e19-d91d308675f6" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.35:8080/healthz\": dial tcp 10.217.0.35:8080: connect: connection refused" Dec 13 03:12:40 crc kubenswrapper[5070]: I1213 03:12:40.883502 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-27s4z" Dec 13 03:12:40 crc kubenswrapper[5070]: I1213 03:12:40.883836 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-27s4z" podStartSLOduration=19.883819891999998 podStartE2EDuration="19.883819892s" podCreationTimestamp="2025-12-13 03:12:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 03:12:40.883802151 +0000 UTC m=+53.119645697" watchObservedRunningTime="2025-12-13 03:12:40.883819892 +0000 UTC m=+53.119663438" Dec 13 03:12:40 crc kubenswrapper[5070]: I1213 03:12:40.922576 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-jj2l2" event={"ID":"3293f634-1926-4bb8-b639-44d6d14263cb","Type":"ContainerStarted","Data":"19dacee25d75d880e089113046a1ea41facd370791996b2781d1f925353c2eb5"} Dec 13 03:12:40 crc kubenswrapper[5070]: I1213 03:12:40.923133 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-jj2l2" Dec 13 03:12:40 crc kubenswrapper[5070]: I1213 03:12:40.924588 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 03:12:40 crc 
kubenswrapper[5070]: E1213 03:12:40.929596 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 03:12:41.429576369 +0000 UTC m=+53.665419915 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 03:12:40 crc kubenswrapper[5070]: I1213 03:12:40.930591 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-n85jm" event={"ID":"a54536a5-e3a5-40e5-8864-f799bdfff6ca","Type":"ContainerStarted","Data":"c352323539134004bdc8499256184fce1e66fa1229fff17cc4020e23e8e64e97"} Dec 13 03:12:40 crc kubenswrapper[5070]: I1213 03:12:40.951397 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-m7blq" event={"ID":"5b047179-d09b-486d-8e47-2c110a710e51","Type":"ContainerStarted","Data":"fbd3ff16d50c3b958602f085b5bfcc08703c4a86554373c97ece6cdac2994fb0"} Dec 13 03:12:40 crc kubenswrapper[5070]: I1213 03:12:40.968152 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-vblq5" podStartSLOduration=19.968137028 podStartE2EDuration="19.968137028s" podCreationTimestamp="2025-12-13 03:12:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 03:12:40.965506228 +0000 UTC m=+53.201349784" watchObservedRunningTime="2025-12-13 03:12:40.968137028 +0000 UTC m=+53.203980574" Dec 13 03:12:41 crc kubenswrapper[5070]: I1213 03:12:41.026314 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xkp8n\" (UID: \"472601ba-cee2-4f6e-ac53-a5606ef0469f\") " pod="openshift-image-registry/image-registry-697d97f7c8-xkp8n" Dec 13 03:12:41 crc kubenswrapper[5070]: E1213 03:12:41.027777 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 03:12:41.527764291 +0000 UTC m=+53.763607837 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xkp8n" (UID: "472601ba-cee2-4f6e-ac53-a5606ef0469f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 03:12:41 crc kubenswrapper[5070]: I1213 03:12:41.029365 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-6z6fd" event={"ID":"e96df34c-cff8-4655-9f8c-2f0baf4f772c","Type":"ContainerStarted","Data":"c899483b6b76a0b32381bd32280e759002331dad7e45a516691eabc1ebc3c294"} Dec 13 03:12:41 crc kubenswrapper[5070]: I1213 03:12:41.106062 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-grwvc" event={"ID":"04b427fa-011a-4d5c-8844-48bb0c3f319e","Type":"ContainerStarted","Data":"6ba46108994f333c2eaed8b7318ba3affc6fa6b62017bc9882e64e3ff5de774e"} Dec 13 03:12:41 crc kubenswrapper[5070]: I1213 03:12:41.127615 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 03:12:41 crc kubenswrapper[5070]: E1213 03:12:41.128710 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 03:12:41.628696026 +0000 UTC m=+53.864539572 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 03:12:41 crc kubenswrapper[5070]: I1213 03:12:41.145792 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-dvc5s" event={"ID":"6dca72ea-876f-4b17-90ee-62f8287ea7d6","Type":"ContainerStarted","Data":"914af8856c112a5ff1eeb85cff3f5b3cbe342f404fc28b2eb263e1821a0fb96d"} Dec 13 03:12:41 crc kubenswrapper[5070]: I1213 03:12:41.148490 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-m7blq" podStartSLOduration=20.148467647 podStartE2EDuration="20.148467647s" podCreationTimestamp="2025-12-13 03:12:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 03:12:41.048847888 +0000 UTC m=+53.284691434" watchObservedRunningTime="2025-12-13 03:12:41.148467647 +0000 UTC m=+53.384311193" Dec 13 03:12:41 crc kubenswrapper[5070]: I1213 03:12:41.149167 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-jj2l2" podStartSLOduration=20.149162846 podStartE2EDuration="20.149162846s" podCreationTimestamp="2025-12-13 03:12:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 03:12:41.120878999 +0000 UTC m=+53.356722545" watchObservedRunningTime="2025-12-13 03:12:41.149162846 +0000 UTC m=+53.385006392" Dec 13 03:12:41 crc kubenswrapper[5070]: I1213 03:12:41.187614 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-ckvrb" event={"ID":"a508f977-bce2-4269-ae18-1c655e9befd8","Type":"ContainerStarted","Data":"42059cd06c924a09c00491529b7e2699bd06cd21ee9d557d2472e2cb60259342"} Dec 13 03:12:41 crc kubenswrapper[5070]: I1213 03:12:41.202245 5070 patch_prober.go:28] interesting pod/router-default-5444994796-z8556 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 13 03:12:41 crc kubenswrapper[5070]: [-]has-synced failed: reason withheld Dec 13 03:12:41 crc kubenswrapper[5070]: [+]process-running ok Dec 13 03:12:41 crc kubenswrapper[5070]: healthz check failed Dec 13 03:12:41 crc kubenswrapper[5070]: I1213 03:12:41.202297 5070 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-z8556" podUID="bf452261-a66d-45bd-9155-55aa347f086a" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 13 03:12:41 crc kubenswrapper[5070]: I1213 03:12:41.202731 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-tmwbx" event={"ID":"86e29ac2-28cb-457d-8f96-5a60c3d535e3","Type":"ContainerStarted","Data":"205a16cc69e5afa6d5f6f5ed9cf393fd846868eca9c04989d04afca250d93d14"} Dec 13 03:12:41 crc 
kubenswrapper[5070]: I1213 03:12:41.218074 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-6z6fd" podStartSLOduration=20.218056905 podStartE2EDuration="20.218056905s" podCreationTimestamp="2025-12-13 03:12:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 03:12:41.172333118 +0000 UTC m=+53.408176664" watchObservedRunningTime="2025-12-13 03:12:41.218056905 +0000 UTC m=+53.453900451" Dec 13 03:12:41 crc kubenswrapper[5070]: I1213 03:12:41.219326 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-dvc5s" podStartSLOduration=20.219319998 podStartE2EDuration="20.219319998s" podCreationTimestamp="2025-12-13 03:12:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 03:12:41.216292978 +0000 UTC m=+53.452136524" watchObservedRunningTime="2025-12-13 03:12:41.219319998 +0000 UTC m=+53.455163544" Dec 13 03:12:41 crc kubenswrapper[5070]: I1213 03:12:41.229302 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xkp8n\" (UID: \"472601ba-cee2-4f6e-ac53-a5606ef0469f\") " pod="openshift-image-registry/image-registry-697d97f7c8-xkp8n" Dec 13 03:12:41 crc kubenswrapper[5070]: E1213 03:12:41.231317 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 03:12:41.731300935 +0000 UTC m=+53.967144481 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xkp8n" (UID: "472601ba-cee2-4f6e-ac53-a5606ef0469f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 03:12:41 crc kubenswrapper[5070]: I1213 03:12:41.251992 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rx5gc" event={"ID":"3cd4a611-b694-4ee2-9290-b62ad8854af3","Type":"ContainerStarted","Data":"0ac12af6e1b54f1dc612f864a2b192b828309c8e9de4ce77f4a7d2206b948bb9"} Dec 13 03:12:41 crc kubenswrapper[5070]: I1213 03:12:41.252885 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rx5gc" Dec 13 03:12:41 crc kubenswrapper[5070]: I1213 03:12:41.262041 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-dwkv2" event={"ID":"e0009fee-6991-4819-ae3d-2d075aa961af","Type":"ContainerStarted","Data":"5791208f9c365a6b1e4a9fec8efd6b6f1f3ab36edd0c9592694addeff6bfc9d1"} Dec 13 03:12:41 crc kubenswrapper[5070]: I1213 03:12:41.262811 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-dwkv2" Dec 13 03:12:41 crc kubenswrapper[5070]: I1213 03:12:41.269406 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-rlmj4" event={"ID":"68d97f24-c17b-4120-be6e-48655b5efe88","Type":"ContainerStarted","Data":"c6bfc029496fabc0584e1bcca259e33d7b31d753e102c781507a565be0d61ef6"} Dec 13 03:12:41 crc kubenswrapper[5070]: I1213 03:12:41.270600 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-lrx9b" event={"ID":"9dffbe71-422b-480e-8bf0-e6b89f6daa88","Type":"ContainerStarted","Data":"ba2cc74bd95bf6b9e63a744a2bdc8383c50be45ec57e49d6c7d0330bbcd34276"} Dec 13 03:12:41 crc kubenswrapper[5070]: I1213 03:12:41.295488 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rx5gc" Dec 13 03:12:41 crc kubenswrapper[5070]: I1213 03:12:41.304483 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-z445m" event={"ID":"9829d6ca-fc22-4e71-a966-cff569f273fb","Type":"ContainerStarted","Data":"980aa2443d8d065fc29777880537110749f39f442a8d945ea5411d61977bd869"} Dec 13 03:12:41 crc kubenswrapper[5070]: I1213 03:12:41.304878 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rx5gc" podStartSLOduration=20.304863916 podStartE2EDuration="20.304863916s" podCreationTimestamp="2025-12-13 03:12:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 03:12:41.303703046 +0000 UTC m=+53.539546592" watchObservedRunningTime="2025-12-13 03:12:41.304863916 +0000 UTC m=+53.540707462" Dec 13 03:12:41 crc kubenswrapper[5070]: I1213 03:12:41.332086 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 03:12:41 crc kubenswrapper[5070]: E1213 03:12:41.334071 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 03:12:41.834051226 +0000 UTC m=+54.069894772 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 03:12:41 crc kubenswrapper[5070]: I1213 03:12:41.334767 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-5wtk8" event={"ID":"44e54907-8f0d-4c3c-960c-e1e5bcad7523","Type":"ContainerStarted","Data":"f8c27f2d9d4ccd04c842fc14d9e5408d7449d55c252b2417c2c7858da25e2a76"} Dec 13 03:12:41 crc kubenswrapper[5070]: I1213 03:12:41.344178 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-k6vpl" event={"ID":"bbdcfa81-b48d-4067-af2e-0de54cea8c7e","Type":"ContainerStarted","Data":"ae6bd7b60401d1bbef08df7fe9ead938992778922abe3f0960f64e1d26ecb76e"} Dec 13 03:12:41 crc kubenswrapper[5070]: I1213 03:12:41.397085 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-dlrd7" event={"ID":"6234fc7c-52ec-4021-b04f-0264df7a6307","Type":"ContainerStarted","Data":"8009858eccb397de48ba0d6b760ae7cd236b0a9bb4084a05f4d5f783fee02ae6"} Dec 13 03:12:41 crc kubenswrapper[5070]: I1213 03:12:41.436683 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xkp8n\" (UID: \"472601ba-cee2-4f6e-ac53-a5606ef0469f\") " pod="openshift-image-registry/image-registry-697d97f7c8-xkp8n" Dec 13 03:12:41 crc kubenswrapper[5070]: E1213 03:12:41.442217 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 03:12:41.942200752 +0000 UTC m=+54.178044298 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xkp8n" (UID: "472601ba-cee2-4f6e-ac53-a5606ef0469f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 03:12:41 crc kubenswrapper[5070]: I1213 03:12:41.454192 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-n4gjw" event={"ID":"df8e88fc-38e8-4748-aed5-7fb71bfdb0df","Type":"ContainerStarted","Data":"b39e7749e10bf68434a5557e0adf2a73ea9db0b0d40716afcf10c59150a0a9f6"} Dec 13 03:12:41 crc kubenswrapper[5070]: I1213 03:12:41.455946 5070 patch_prober.go:28] interesting pod/downloads-7954f5f757-bjz8x container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.12:8080/\": dial tcp 10.217.0.12:8080: connect: connection refused" start-of-body= Dec 13 03:12:41 crc kubenswrapper[5070]: I1213 03:12:41.456004 5070 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-bjz8x" podUID="93fc381a-a5cc-4d02-bd2a-ba2898536d45" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.12:8080/\": dial tcp 10.217.0.12:8080: connect: connection refused" Dec 13 03:12:41 crc kubenswrapper[5070]: I1213 03:12:41.469924 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-qk5h4" Dec 13 03:12:41 crc kubenswrapper[5070]: I1213 03:12:41.524068 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-rlmj4" podStartSLOduration=20.524046652 podStartE2EDuration="20.524046652s" podCreationTimestamp="2025-12-13 03:12:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 03:12:41.50920074 +0000 UTC m=+53.745044286" watchObservedRunningTime="2025-12-13 03:12:41.524046652 +0000 UTC m=+53.759890198" Dec 13 03:12:41 crc kubenswrapper[5070]: I1213 03:12:41.525170 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-dwkv2" podStartSLOduration=20.525164272 podStartE2EDuration="20.525164272s" podCreationTimestamp="2025-12-13 03:12:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 03:12:41.386212244 +0000 UTC m=+53.622055790" watchObservedRunningTime="2025-12-13 03:12:41.525164272 +0000 UTC m=+53.761007818" Dec 13 03:12:41 crc kubenswrapper[5070]: I1213 03:12:41.543207 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 03:12:41 crc kubenswrapper[5070]: E1213 03:12:41.545258 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" 
failed. No retries permitted until 2025-12-13 03:12:42.045226992 +0000 UTC m=+54.281070708 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 03:12:41 crc kubenswrapper[5070]: I1213 03:12:41.649400 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xkp8n\" (UID: \"472601ba-cee2-4f6e-ac53-a5606ef0469f\") " pod="openshift-image-registry/image-registry-697d97f7c8-xkp8n" Dec 13 03:12:41 crc kubenswrapper[5070]: E1213 03:12:41.650224 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 03:12:42.150209692 +0000 UTC m=+54.386053228 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xkp8n" (UID: "472601ba-cee2-4f6e-ac53-a5606ef0469f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 03:12:41 crc kubenswrapper[5070]: I1213 03:12:41.754225 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 03:12:41 crc kubenswrapper[5070]: E1213 03:12:41.754651 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 03:12:42.25463709 +0000 UTC m=+54.490480636 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 03:12:41 crc kubenswrapper[5070]: I1213 03:12:41.859479 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xkp8n\" (UID: \"472601ba-cee2-4f6e-ac53-a5606ef0469f\") " pod="openshift-image-registry/image-registry-697d97f7c8-xkp8n" Dec 13 03:12:41 crc kubenswrapper[5070]: E1213 03:12:41.859827 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 03:12:42.359815686 +0000 UTC m=+54.595659232 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xkp8n" (UID: "472601ba-cee2-4f6e-ac53-a5606ef0469f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 03:12:41 crc kubenswrapper[5070]: I1213 03:12:41.890572 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-z445m" podStartSLOduration=20.890547957 podStartE2EDuration="20.890547957s" podCreationTimestamp="2025-12-13 03:12:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 03:12:41.886022937 +0000 UTC m=+54.121866483" watchObservedRunningTime="2025-12-13 03:12:41.890547957 +0000 UTC m=+54.126391503" Dec 13 03:12:41 crc kubenswrapper[5070]: I1213 03:12:41.942045 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-n4gjw" podStartSLOduration=20.942026145 podStartE2EDuration="20.942026145s" podCreationTimestamp="2025-12-13 03:12:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 03:12:41.940157987 +0000 UTC m=+54.176001533" watchObservedRunningTime="2025-12-13 03:12:41.942026145 +0000 UTC m=+54.177869681" Dec 13 03:12:41 crc kubenswrapper[5070]: I1213 03:12:41.962363 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 03:12:41 crc kubenswrapper[5070]: E1213 03:12:41.963266 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 
podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 03:12:42.463240886 +0000 UTC m=+54.699084432 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 03:12:41 crc kubenswrapper[5070]: I1213 03:12:41.999474 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-dlrd7" podStartSLOduration=20.999434811 podStartE2EDuration="20.999434811s" podCreationTimestamp="2025-12-13 03:12:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 03:12:41.99709506 +0000 UTC m=+54.232938616" watchObservedRunningTime="2025-12-13 03:12:41.999434811 +0000 UTC m=+54.235278357" Dec 13 03:12:42 crc kubenswrapper[5070]: I1213 03:12:42.033819 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-k6vpl" podStartSLOduration=21.033802148 podStartE2EDuration="21.033802148s" podCreationTimestamp="2025-12-13 03:12:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 03:12:42.033412038 +0000 UTC m=+54.269255584" watchObservedRunningTime="2025-12-13 03:12:42.033802148 +0000 UTC m=+54.269645694" Dec 13 03:12:42 crc kubenswrapper[5070]: I1213 03:12:42.044809 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-dwkv2" Dec 13 03:12:42 crc kubenswrapper[5070]: I1213 03:12:42.064424 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xkp8n\" (UID: \"472601ba-cee2-4f6e-ac53-a5606ef0469f\") " pod="openshift-image-registry/image-registry-697d97f7c8-xkp8n" Dec 13 03:12:42 crc kubenswrapper[5070]: E1213 03:12:42.065209 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 03:12:42.565192547 +0000 UTC m=+54.801036093 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xkp8n" (UID: "472601ba-cee2-4f6e-ac53-a5606ef0469f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 03:12:42 crc kubenswrapper[5070]: I1213 03:12:42.171338 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 03:12:42 crc kubenswrapper[5070]: E1213 03:12:42.171830 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 03:12:42.671815541 +0000 UTC m=+54.907659087 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 03:12:42 crc kubenswrapper[5070]: I1213 03:12:42.208269 5070 patch_prober.go:28] interesting pod/router-default-5444994796-z8556 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 13 03:12:42 crc kubenswrapper[5070]: [-]has-synced failed: reason withheld Dec 13 03:12:42 crc kubenswrapper[5070]: [+]process-running ok Dec 13 03:12:42 crc kubenswrapper[5070]: healthz check failed Dec 13 03:12:42 crc kubenswrapper[5070]: I1213 03:12:42.208333 5070 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-z8556" podUID="bf452261-a66d-45bd-9155-55aa347f086a" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 13 03:12:42 crc kubenswrapper[5070]: I1213 03:12:42.273110 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xkp8n\" (UID: \"472601ba-cee2-4f6e-ac53-a5606ef0469f\") " pod="openshift-image-registry/image-registry-697d97f7c8-xkp8n" Dec 13 03:12:42 crc kubenswrapper[5070]: E1213 03:12:42.273548 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 03:12:42.773532947 +0000 UTC m=+55.009376483 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xkp8n" (UID: "472601ba-cee2-4f6e-ac53-a5606ef0469f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 03:12:42 crc kubenswrapper[5070]: I1213 03:12:42.375015 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 03:12:42 crc kubenswrapper[5070]: E1213 03:12:42.375471 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 03:12:42.875430957 +0000 UTC m=+55.111274493 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 03:12:42 crc kubenswrapper[5070]: I1213 03:12:42.476609 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xkp8n\" (UID: \"472601ba-cee2-4f6e-ac53-a5606ef0469f\") " pod="openshift-image-registry/image-registry-697d97f7c8-xkp8n" Dec 13 03:12:42 crc kubenswrapper[5070]: E1213 03:12:42.477473 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 03:12:42.97745713 +0000 UTC m=+55.213300676 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xkp8n" (UID: "472601ba-cee2-4f6e-ac53-a5606ef0469f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 03:12:42 crc kubenswrapper[5070]: I1213 03:12:42.493889 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-jgtrr" event={"ID":"74e8be9f-8aac-4eaf-9d70-b5ffab6f9b54","Type":"ContainerStarted","Data":"9436dcc4e23782809f657a623a18c7d86ec4759a75baa266461f76ab8728d780"} Dec 13 03:12:42 crc kubenswrapper[5070]: I1213 03:12:42.503584 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-6td6p" event={"ID":"5dbe31ab-6e43-41c3-8f14-e1ac87938038","Type":"ContainerStarted","Data":"e546dcaee4db8866529a00c086f434df8ab366ad4b6f1ae7f46039c9b9a09f8b"} Dec 13 03:12:42 crc kubenswrapper[5070]: I1213 03:12:42.503635 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-6td6p" event={"ID":"5dbe31ab-6e43-41c3-8f14-e1ac87938038","Type":"ContainerStarted","Data":"7cdbf8e58ee71229e7260fea8368eeff65e37bc308824c0c3932da283632e96e"} Dec 13 03:12:42 crc kubenswrapper[5070]: I1213 03:12:42.514249 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-s276q" event={"ID":"56416152-93a7-49cb-a1f0-01577eb6cadc","Type":"ContainerStarted","Data":"66fefbca0e1ccf93280e2452f3960c017ac78140b8c02bbdcc766fdaffb69a59"} Dec 13 03:12:42 crc kubenswrapper[5070]: I1213 03:12:42.522683 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-jgtrr" podStartSLOduration=9.522666864 podStartE2EDuration="9.522666864s" podCreationTimestamp="2025-12-13 03:12:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 03:12:42.518888023 +0000 UTC m=+54.754731569" watchObservedRunningTime="2025-12-13 03:12:42.522666864 +0000 UTC m=+54.758510410" Dec 13 03:12:42 crc kubenswrapper[5070]: I1213 03:12:42.533267 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29426580-zzsjs" event={"ID":"11d847a0-fe18-4f4a-8d65-b0f64b643c68","Type":"ContainerStarted","Data":"6b159dad678009b8c726600e93ff8fb8b01cd53bd0732a088e523eb2d241b277"} Dec 13 03:12:42 crc kubenswrapper[5070]: I1213 03:12:42.556858 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-m7blq" event={"ID":"5b047179-d09b-486d-8e47-2c110a710e51","Type":"ContainerStarted","Data":"4822cee2cb5879327b4e70660037578ac12db99ca5e79b27bc09c8e82e3e8d47"} Dec 13 03:12:42 crc kubenswrapper[5070]: I1213 03:12:42.569858 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-n85jm" event={"ID":"a54536a5-e3a5-40e5-8864-f799bdfff6ca","Type":"ContainerStarted","Data":"e53bf940e9c24240238df71987b6c27543ac77f07e9bfb4f9f133e60f708db05"} Dec 13 03:12:42 crc kubenswrapper[5070]: I1213 03:12:42.570838 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-dns/dns-default-n85jm" Dec 13 03:12:42 crc kubenswrapper[5070]: I1213 03:12:42.578014 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 03:12:42 crc kubenswrapper[5070]: I1213 03:12:42.578787 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-hrb5f" event={"ID":"8cfebcb5-7935-4f04-8d87-40dce0dc5ef7","Type":"ContainerStarted","Data":"5cce7de377043e7a7c25bb3a7ee8f9c787fd895f3431a53b1376196bdd317a19"} Dec 13 03:12:42 crc kubenswrapper[5070]: E1213 03:12:42.579672 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 03:12:43.079652008 +0000 UTC m=+55.315495554 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 03:12:42 crc kubenswrapper[5070]: I1213 03:12:42.600632 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-n4gjw" event={"ID":"df8e88fc-38e8-4748-aed5-7fb71bfdb0df","Type":"ContainerStarted","Data":"86ccc0af110eedd0f4b274db1802d2022a767147899a11b98888eae151339edf"} Dec 13 03:12:42 crc kubenswrapper[5070]: I1213 03:12:42.619173 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-ckvrb" event={"ID":"a508f977-bce2-4269-ae18-1c655e9befd8","Type":"ContainerStarted","Data":"f66cedcaa4698d08f32b5e5ce802325f758b92ec1d29000d9b7b10e5444ae512"} Dec 13 03:12:42 crc kubenswrapper[5070]: I1213 03:12:42.640851 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-s276q" podStartSLOduration=21.640836033 podStartE2EDuration="21.640836033s" podCreationTimestamp="2025-12-13 03:12:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 03:12:42.586500568 +0000 UTC m=+54.822344114" watchObservedRunningTime="2025-12-13 03:12:42.640836033 +0000 UTC m=+54.876679579" Dec 13 03:12:42 crc kubenswrapper[5070]: I1213 03:12:42.642246 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-6td6p" podStartSLOduration=21.64224059 podStartE2EDuration="21.64224059s" podCreationTimestamp="2025-12-13 03:12:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 03:12:42.639841307 +0000 UTC m=+54.875684843" watchObservedRunningTime="2025-12-13 03:12:42.64224059 +0000 UTC m=+54.878084136" Dec 13 03:12:42 crc kubenswrapper[5070]: 
I1213 03:12:42.655374 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-5wtk8" event={"ID":"44e54907-8f0d-4c3c-960c-e1e5bcad7523","Type":"ContainerStarted","Data":"f705455cc51602042118f56ceb58ad1a07a7e71e886547623575dca7cd610122"} Dec 13 03:12:42 crc kubenswrapper[5070]: I1213 03:12:42.677534 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-vx28k" event={"ID":"331fdd7d-1eb1-4a86-ace3-75fe6d2a50e0","Type":"ContainerStarted","Data":"10d87b2a0ea59696056bcf437464c155ed614ff1e400668f5216984d59415dc1"} Dec 13 03:12:42 crc kubenswrapper[5070]: I1213 03:12:42.677597 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-vx28k" event={"ID":"331fdd7d-1eb1-4a86-ace3-75fe6d2a50e0","Type":"ContainerStarted","Data":"c0ac24b07ded3251bc22fc68358c9b88efcc787677879370443a97b4f2764097"} Dec 13 03:12:42 crc kubenswrapper[5070]: I1213 03:12:42.679828 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xkp8n\" (UID: \"472601ba-cee2-4f6e-ac53-a5606ef0469f\") " pod="openshift-image-registry/image-registry-697d97f7c8-xkp8n" Dec 13 03:12:42 crc kubenswrapper[5070]: E1213 03:12:42.693303 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 03:12:43.193280368 +0000 UTC m=+55.429123914 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xkp8n" (UID: "472601ba-cee2-4f6e-ac53-a5606ef0469f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 03:12:42 crc kubenswrapper[5070]: I1213 03:12:42.693613 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-ckvrb" podStartSLOduration=21.693592356 podStartE2EDuration="21.693592356s" podCreationTimestamp="2025-12-13 03:12:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 03:12:42.689814825 +0000 UTC m=+54.925658371" watchObservedRunningTime="2025-12-13 03:12:42.693592356 +0000 UTC m=+54.929435892" Dec 13 03:12:42 crc kubenswrapper[5070]: I1213 03:12:42.694865 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-dlrd7" event={"ID":"6234fc7c-52ec-4021-b04f-0264df7a6307","Type":"ContainerStarted","Data":"447da7458fc2f17a4a7ea10952f6c8c61dc8aed3f71cdd42559dce3a2cd6342c"} Dec 13 03:12:42 crc kubenswrapper[5070]: I1213 03:12:42.710930 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-wm2b8" event={"ID":"7eb9db2d-0c78-4df0-ad39-1b3d6bc75d82","Type":"ContainerStarted","Data":"e0cfaa8c8fc9383b579c348f21ec4ea133fc59601d1a7a604b05ec67df47e25d"} Dec 13 03:12:42 crc kubenswrapper[5070]: I1213 03:12:42.713410 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-wm2b8" Dec 13 03:12:42 crc kubenswrapper[5070]: I1213 03:12:42.716271 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-6fssx" event={"ID":"d0e149ba-a883-49ad-a21f-40b99873662b","Type":"ContainerStarted","Data":"719de45217b4277570af5d45b378968dbaeb9ecf41651a5dc658b12093f0c2bb"} Dec 13 03:12:42 crc kubenswrapper[5070]: I1213 03:12:42.717584 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-kpb6p" event={"ID":"79783d23-8a02-4af8-bc4b-8e8f74dae08e","Type":"ContainerStarted","Data":"a83ccbf82efd91871baed9a830243107df5cb44e6136619ea60b5fb2e943e2eb"} Dec 13 03:12:42 crc kubenswrapper[5070]: I1213 03:12:42.722828 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-hrb5f" podStartSLOduration=9.722804237 podStartE2EDuration="9.722804237s" podCreationTimestamp="2025-12-13 03:12:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 03:12:42.715590557 +0000 UTC m=+54.951434103" watchObservedRunningTime="2025-12-13 03:12:42.722804237 +0000 UTC m=+54.958647783" Dec 13 03:12:42 crc kubenswrapper[5070]: I1213 03:12:42.723124 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-kpb6p" Dec 13 03:12:42 crc kubenswrapper[5070]: I1213 03:12:42.723172 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-multus/network-metrics-daemon-tmwbx" event={"ID":"86e29ac2-28cb-457d-8f96-5a60c3d535e3","Type":"ContainerStarted","Data":"f75228e068410a584a3201c8100f9cb5618fab183bc78f7dc67744365d319036"} Dec 13 03:12:42 crc kubenswrapper[5070]: I1213 03:12:42.723203 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-7hxcj" event={"ID":"e55788c5-581c-4013-8c44-83a0b5f74b0a","Type":"ContainerStarted","Data":"9d1df5ecf71e6c3bd8163b875755be100843c4c652be1864ea60c127e7d3262d"} Dec 13 03:12:42 crc kubenswrapper[5070]: I1213 03:12:42.736481 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-h8tb8" event={"ID":"088b1a9f-bf10-4751-875e-092b9c149cfa","Type":"ContainerStarted","Data":"6aea83a0993a5a77de45bf849ee627ca8b9b3efff0e1d8075a24fb477158ae61"} Dec 13 03:12:42 crc kubenswrapper[5070]: I1213 03:12:42.747539 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-2sbvn" event={"ID":"994638f3-a23b-445a-b2d9-929361c0a5e3","Type":"ContainerStarted","Data":"29a3f9ebbd5287ba2b27c46fd91c8e0d71bd254786aa6bb49404c6b258f22c2e"} Dec 13 03:12:42 crc kubenswrapper[5070]: I1213 03:12:42.747581 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-2sbvn" event={"ID":"994638f3-a23b-445a-b2d9-929361c0a5e3","Type":"ContainerStarted","Data":"f819be2640192c04f1770cf78e6308ae2d2269e41495819f49b1b98b9cd785e3"} Dec 13 03:12:42 crc kubenswrapper[5070]: I1213 03:12:42.758355 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-grwvc" event={"ID":"04b427fa-011a-4d5c-8844-48bb0c3f319e","Type":"ContainerStarted","Data":"422c4f350594db0e9d44948c47eb0814c120d0ff24f22b46abdc2275f935ecab"} Dec 13 03:12:42 crc kubenswrapper[5070]: I1213 03:12:42.758405 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-grwvc" event={"ID":"04b427fa-011a-4d5c-8844-48bb0c3f319e","Type":"ContainerStarted","Data":"d8ca3a084f196706c106b7644dbc84cba5478969134789337fb521a371953963"} Dec 13 03:12:42 crc kubenswrapper[5070]: I1213 03:12:42.759221 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-grwvc" Dec 13 03:12:42 crc kubenswrapper[5070]: I1213 03:12:42.763938 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-fhh5m" event={"ID":"849cc5eb-fe81-4755-a13e-56cdc7b4f248","Type":"ContainerStarted","Data":"189471143cb7c9cae70c78f881bb0a4dacc352e6acc46eed8690de507cf23ab2"} Dec 13 03:12:42 crc kubenswrapper[5070]: I1213 03:12:42.765212 5070 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-vblq5 container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.35:8080/healthz\": dial tcp 10.217.0.35:8080: connect: connection refused" start-of-body= Dec 13 03:12:42 crc kubenswrapper[5070]: I1213 03:12:42.765276 5070 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-vblq5" podUID="d84fad97-769a-4f5d-8e19-d91d308675f6" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.35:8080/healthz\": dial tcp 10.217.0.35:8080: connect: connection 
refused" Dec 13 03:12:42 crc kubenswrapper[5070]: I1213 03:12:42.789376 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 03:12:42 crc kubenswrapper[5070]: E1213 03:12:42.796916 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 03:12:43.296893083 +0000 UTC m=+55.532736629 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 03:12:42 crc kubenswrapper[5070]: I1213 03:12:42.813588 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-kpb6p" Dec 13 03:12:42 crc kubenswrapper[5070]: I1213 03:12:42.849934 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29426580-zzsjs" podStartSLOduration=21.849913573 podStartE2EDuration="21.849913573s" podCreationTimestamp="2025-12-13 03:12:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 03:12:42.792992689 +0000 UTC m=+55.028836265" watchObservedRunningTime="2025-12-13 03:12:42.849913573 +0000 UTC m=+55.085757119" Dec 13 03:12:42 crc kubenswrapper[5070]: I1213 03:12:42.877915 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-n85jm" podStartSLOduration=9.877873441 podStartE2EDuration="9.877873441s" podCreationTimestamp="2025-12-13 03:12:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 03:12:42.849044209 +0000 UTC m=+55.084887775" watchObservedRunningTime="2025-12-13 03:12:42.877873441 +0000 UTC m=+55.113716987" Dec 13 03:12:42 crc kubenswrapper[5070]: I1213 03:12:42.897381 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xkp8n\" (UID: \"472601ba-cee2-4f6e-ac53-a5606ef0469f\") " pod="openshift-image-registry/image-registry-697d97f7c8-xkp8n" Dec 13 03:12:42 crc kubenswrapper[5070]: E1213 03:12:42.901091 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 03:12:43.401075293 +0000 UTC m=+55.636918839 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xkp8n" (UID: "472601ba-cee2-4f6e-ac53-a5606ef0469f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 03:12:42 crc kubenswrapper[5070]: I1213 03:12:42.945790 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-grwvc" podStartSLOduration=21.945773223 podStartE2EDuration="21.945773223s" podCreationTimestamp="2025-12-13 03:12:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 03:12:42.881306131 +0000 UTC m=+55.117149677" watchObservedRunningTime="2025-12-13 03:12:42.945773223 +0000 UTC m=+55.181616769" Dec 13 03:12:42 crc kubenswrapper[5070]: I1213 03:12:42.947342 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-rx625"] Dec 13 03:12:42 crc kubenswrapper[5070]: I1213 03:12:42.948237 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-rx625" Dec 13 03:12:42 crc kubenswrapper[5070]: I1213 03:12:42.984474 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 13 03:12:43 crc kubenswrapper[5070]: I1213 03:12:43.000696 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 03:12:43 crc kubenswrapper[5070]: I1213 03:12:43.000996 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4kn69\" (UniqueName: \"kubernetes.io/projected/9e4db140-cd43-41ac-a1d7-1913a66ba814-kube-api-access-4kn69\") pod \"community-operators-rx625\" (UID: \"9e4db140-cd43-41ac-a1d7-1913a66ba814\") " pod="openshift-marketplace/community-operators-rx625" Dec 13 03:12:43 crc kubenswrapper[5070]: I1213 03:12:43.001050 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9e4db140-cd43-41ac-a1d7-1913a66ba814-catalog-content\") pod \"community-operators-rx625\" (UID: \"9e4db140-cd43-41ac-a1d7-1913a66ba814\") " pod="openshift-marketplace/community-operators-rx625" Dec 13 03:12:43 crc kubenswrapper[5070]: I1213 03:12:43.001110 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9e4db140-cd43-41ac-a1d7-1913a66ba814-utilities\") pod \"community-operators-rx625\" (UID: \"9e4db140-cd43-41ac-a1d7-1913a66ba814\") " pod="openshift-marketplace/community-operators-rx625" Dec 13 03:12:43 crc kubenswrapper[5070]: E1213 03:12:43.001228 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-12-13 03:12:43.501207957 +0000 UTC m=+55.737051513 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 03:12:43 crc kubenswrapper[5070]: I1213 03:12:43.063692 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-5wtk8" podStartSLOduration=22.063674485 podStartE2EDuration="22.063674485s" podCreationTimestamp="2025-12-13 03:12:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 03:12:43.036773105 +0000 UTC m=+55.272616651" watchObservedRunningTime="2025-12-13 03:12:43.063674485 +0000 UTC m=+55.299518031" Dec 13 03:12:43 crc kubenswrapper[5070]: I1213 03:12:43.065732 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-rx625"] Dec 13 03:12:43 crc kubenswrapper[5070]: I1213 03:12:43.101849 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xkp8n\" (UID: \"472601ba-cee2-4f6e-ac53-a5606ef0469f\") " pod="openshift-image-registry/image-registry-697d97f7c8-xkp8n" Dec 13 03:12:43 crc kubenswrapper[5070]: I1213 03:12:43.101891 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9e4db140-cd43-41ac-a1d7-1913a66ba814-catalog-content\") pod \"community-operators-rx625\" (UID: \"9e4db140-cd43-41ac-a1d7-1913a66ba814\") " pod="openshift-marketplace/community-operators-rx625" Dec 13 03:12:43 crc kubenswrapper[5070]: I1213 03:12:43.101964 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9e4db140-cd43-41ac-a1d7-1913a66ba814-utilities\") pod \"community-operators-rx625\" (UID: \"9e4db140-cd43-41ac-a1d7-1913a66ba814\") " pod="openshift-marketplace/community-operators-rx625" Dec 13 03:12:43 crc kubenswrapper[5070]: I1213 03:12:43.102000 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4kn69\" (UniqueName: \"kubernetes.io/projected/9e4db140-cd43-41ac-a1d7-1913a66ba814-kube-api-access-4kn69\") pod \"community-operators-rx625\" (UID: \"9e4db140-cd43-41ac-a1d7-1913a66ba814\") " pod="openshift-marketplace/community-operators-rx625" Dec 13 03:12:43 crc kubenswrapper[5070]: E1213 03:12:43.102519 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 03:12:43.60250886 +0000 UTC m=+55.838352406 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xkp8n" (UID: "472601ba-cee2-4f6e-ac53-a5606ef0469f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 03:12:43 crc kubenswrapper[5070]: I1213 03:12:43.102982 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9e4db140-cd43-41ac-a1d7-1913a66ba814-catalog-content\") pod \"community-operators-rx625\" (UID: \"9e4db140-cd43-41ac-a1d7-1913a66ba814\") " pod="openshift-marketplace/community-operators-rx625" Dec 13 03:12:43 crc kubenswrapper[5070]: I1213 03:12:43.104034 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9e4db140-cd43-41ac-a1d7-1913a66ba814-utilities\") pod \"community-operators-rx625\" (UID: \"9e4db140-cd43-41ac-a1d7-1913a66ba814\") " pod="openshift-marketplace/community-operators-rx625" Dec 13 03:12:43 crc kubenswrapper[5070]: I1213 03:12:43.137538 5070 patch_prober.go:28] interesting pod/openshift-config-operator-7777fb866f-jj2l2 container/openshift-config-operator namespace/openshift-config-operator: Liveness probe status=failure output="Get \"https://10.217.0.16:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 13 03:12:43 crc kubenswrapper[5070]: I1213 03:12:43.137617 5070 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-config-operator/openshift-config-operator-7777fb866f-jj2l2" podUID="3293f634-1926-4bb8-b639-44d6d14263cb" containerName="openshift-config-operator" probeResult="failure" output="Get \"https://10.217.0.16:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Dec 13 03:12:43 crc kubenswrapper[5070]: I1213 03:12:43.137554 5070 patch_prober.go:28] interesting pod/openshift-config-operator-7777fb866f-jj2l2 container/openshift-config-operator namespace/openshift-config-operator: Readiness probe status=failure output="Get \"https://10.217.0.16:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 13 03:12:43 crc kubenswrapper[5070]: I1213 03:12:43.138502 5070 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-config-operator/openshift-config-operator-7777fb866f-jj2l2" podUID="3293f634-1926-4bb8-b639-44d6d14263cb" containerName="openshift-config-operator" probeResult="failure" output="Get \"https://10.217.0.16:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Dec 13 03:12:43 crc kubenswrapper[5070]: I1213 03:12:43.159829 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-fhh5m" podStartSLOduration=22.159809083 podStartE2EDuration="22.159809083s" podCreationTimestamp="2025-12-13 03:12:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 03:12:43.133579501 +0000 UTC m=+55.369423047" 
watchObservedRunningTime="2025-12-13 03:12:43.159809083 +0000 UTC m=+55.395652629" Dec 13 03:12:43 crc kubenswrapper[5070]: I1213 03:12:43.161117 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-lfs78"] Dec 13 03:12:43 crc kubenswrapper[5070]: I1213 03:12:43.163588 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-lfs78" Dec 13 03:12:43 crc kubenswrapper[5070]: I1213 03:12:43.171943 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 13 03:12:43 crc kubenswrapper[5070]: I1213 03:12:43.196647 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-lfs78"] Dec 13 03:12:43 crc kubenswrapper[5070]: I1213 03:12:43.197419 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4kn69\" (UniqueName: \"kubernetes.io/projected/9e4db140-cd43-41ac-a1d7-1913a66ba814-kube-api-access-4kn69\") pod \"community-operators-rx625\" (UID: \"9e4db140-cd43-41ac-a1d7-1913a66ba814\") " pod="openshift-marketplace/community-operators-rx625" Dec 13 03:12:43 crc kubenswrapper[5070]: I1213 03:12:43.199430 5070 patch_prober.go:28] interesting pod/router-default-5444994796-z8556 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 13 03:12:43 crc kubenswrapper[5070]: [-]has-synced failed: reason withheld Dec 13 03:12:43 crc kubenswrapper[5070]: [+]process-running ok Dec 13 03:12:43 crc kubenswrapper[5070]: healthz check failed Dec 13 03:12:43 crc kubenswrapper[5070]: I1213 03:12:43.199515 5070 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-z8556" podUID="bf452261-a66d-45bd-9155-55aa347f086a" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 13 03:12:43 crc kubenswrapper[5070]: I1213 03:12:43.203835 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 03:12:43 crc kubenswrapper[5070]: I1213 03:12:43.204154 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2782a41c-ce36-4c0d-89c1-27b5e12e9b00-catalog-content\") pod \"certified-operators-lfs78\" (UID: \"2782a41c-ce36-4c0d-89c1-27b5e12e9b00\") " pod="openshift-marketplace/certified-operators-lfs78" Dec 13 03:12:43 crc kubenswrapper[5070]: I1213 03:12:43.204245 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2782a41c-ce36-4c0d-89c1-27b5e12e9b00-utilities\") pod \"certified-operators-lfs78\" (UID: \"2782a41c-ce36-4c0d-89c1-27b5e12e9b00\") " pod="openshift-marketplace/certified-operators-lfs78" Dec 13 03:12:43 crc kubenswrapper[5070]: I1213 03:12:43.204319 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6csrd\" (UniqueName: \"kubernetes.io/projected/2782a41c-ce36-4c0d-89c1-27b5e12e9b00-kube-api-access-6csrd\") pod 
\"certified-operators-lfs78\" (UID: \"2782a41c-ce36-4c0d-89c1-27b5e12e9b00\") " pod="openshift-marketplace/certified-operators-lfs78" Dec 13 03:12:43 crc kubenswrapper[5070]: E1213 03:12:43.204485 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 03:12:43.704466221 +0000 UTC m=+55.940309767 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 03:12:43 crc kubenswrapper[5070]: I1213 03:12:43.226743 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-jj2l2" Dec 13 03:12:43 crc kubenswrapper[5070]: I1213 03:12:43.239017 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-kpb6p" podStartSLOduration=22.238990963 podStartE2EDuration="22.238990963s" podCreationTimestamp="2025-12-13 03:12:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 03:12:43.237932965 +0000 UTC m=+55.473776511" watchObservedRunningTime="2025-12-13 03:12:43.238990963 +0000 UTC m=+55.474834509" Dec 13 03:12:43 crc kubenswrapper[5070]: I1213 03:12:43.273828 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-rx625" Dec 13 03:12:43 crc kubenswrapper[5070]: I1213 03:12:43.305415 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-v8pzk"] Dec 13 03:12:43 crc kubenswrapper[5070]: I1213 03:12:43.306958 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xkp8n\" (UID: \"472601ba-cee2-4f6e-ac53-a5606ef0469f\") " pod="openshift-image-registry/image-registry-697d97f7c8-xkp8n" Dec 13 03:12:43 crc kubenswrapper[5070]: I1213 03:12:43.307001 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6csrd\" (UniqueName: \"kubernetes.io/projected/2782a41c-ce36-4c0d-89c1-27b5e12e9b00-kube-api-access-6csrd\") pod \"certified-operators-lfs78\" (UID: \"2782a41c-ce36-4c0d-89c1-27b5e12e9b00\") " pod="openshift-marketplace/certified-operators-lfs78" Dec 13 03:12:43 crc kubenswrapper[5070]: I1213 03:12:43.307039 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2782a41c-ce36-4c0d-89c1-27b5e12e9b00-catalog-content\") pod \"certified-operators-lfs78\" (UID: \"2782a41c-ce36-4c0d-89c1-27b5e12e9b00\") " pod="openshift-marketplace/certified-operators-lfs78" Dec 13 03:12:43 crc kubenswrapper[5070]: I1213 03:12:43.307083 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2782a41c-ce36-4c0d-89c1-27b5e12e9b00-utilities\") pod \"certified-operators-lfs78\" (UID: \"2782a41c-ce36-4c0d-89c1-27b5e12e9b00\") " pod="openshift-marketplace/certified-operators-lfs78" Dec 13 03:12:43 crc kubenswrapper[5070]: I1213 03:12:43.307158 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-v8pzk" Dec 13 03:12:43 crc kubenswrapper[5070]: I1213 03:12:43.307485 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2782a41c-ce36-4c0d-89c1-27b5e12e9b00-utilities\") pod \"certified-operators-lfs78\" (UID: \"2782a41c-ce36-4c0d-89c1-27b5e12e9b00\") " pod="openshift-marketplace/certified-operators-lfs78" Dec 13 03:12:43 crc kubenswrapper[5070]: E1213 03:12:43.307736 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 03:12:43.807725707 +0000 UTC m=+56.043569253 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xkp8n" (UID: "472601ba-cee2-4f6e-ac53-a5606ef0469f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 03:12:43 crc kubenswrapper[5070]: I1213 03:12:43.308256 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2782a41c-ce36-4c0d-89c1-27b5e12e9b00-catalog-content\") pod \"certified-operators-lfs78\" (UID: \"2782a41c-ce36-4c0d-89c1-27b5e12e9b00\") " pod="openshift-marketplace/certified-operators-lfs78" Dec 13 03:12:43 crc kubenswrapper[5070]: I1213 03:12:43.308989 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-h8tb8" podStartSLOduration=22.30896856 podStartE2EDuration="22.30896856s" podCreationTimestamp="2025-12-13 03:12:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 03:12:43.307959034 +0000 UTC m=+55.543802590" watchObservedRunningTime="2025-12-13 03:12:43.30896856 +0000 UTC m=+55.544812106" Dec 13 03:12:43 crc kubenswrapper[5070]: I1213 03:12:43.378940 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-v8pzk"] Dec 13 03:12:43 crc kubenswrapper[5070]: I1213 03:12:43.401362 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6csrd\" (UniqueName: \"kubernetes.io/projected/2782a41c-ce36-4c0d-89c1-27b5e12e9b00-kube-api-access-6csrd\") pod \"certified-operators-lfs78\" (UID: \"2782a41c-ce36-4c0d-89c1-27b5e12e9b00\") " pod="openshift-marketplace/certified-operators-lfs78" Dec 13 03:12:43 crc kubenswrapper[5070]: I1213 03:12:43.402390 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-wm2b8" podStartSLOduration=22.402376146 podStartE2EDuration="22.402376146s" podCreationTimestamp="2025-12-13 03:12:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 03:12:43.37641782 +0000 UTC m=+55.612261366" watchObservedRunningTime="2025-12-13 03:12:43.402376146 +0000 UTC m=+55.638219692" Dec 13 03:12:43 crc kubenswrapper[5070]: I1213 03:12:43.422984 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 03:12:43 crc kubenswrapper[5070]: E1213 03:12:43.423751 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 03:12:43.923725459 +0000 UTC m=+56.159569005 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 03:12:43 crc kubenswrapper[5070]: I1213 03:12:43.424004 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/665082fa-79a0-4ddb-83f8-0f45ad96de11-utilities\") pod \"community-operators-v8pzk\" (UID: \"665082fa-79a0-4ddb-83f8-0f45ad96de11\") " pod="openshift-marketplace/community-operators-v8pzk" Dec 13 03:12:43 crc kubenswrapper[5070]: I1213 03:12:43.424057 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rljcj\" (UniqueName: \"kubernetes.io/projected/665082fa-79a0-4ddb-83f8-0f45ad96de11-kube-api-access-rljcj\") pod \"community-operators-v8pzk\" (UID: \"665082fa-79a0-4ddb-83f8-0f45ad96de11\") " pod="openshift-marketplace/community-operators-v8pzk" Dec 13 03:12:43 crc kubenswrapper[5070]: I1213 03:12:43.424120 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xkp8n\" (UID: \"472601ba-cee2-4f6e-ac53-a5606ef0469f\") " pod="openshift-image-registry/image-registry-697d97f7c8-xkp8n" Dec 13 03:12:43 crc kubenswrapper[5070]: I1213 03:12:43.424163 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/665082fa-79a0-4ddb-83f8-0f45ad96de11-catalog-content\") pod \"community-operators-v8pzk\" (UID: \"665082fa-79a0-4ddb-83f8-0f45ad96de11\") " pod="openshift-marketplace/community-operators-v8pzk" Dec 13 03:12:43 crc kubenswrapper[5070]: E1213 03:12:43.424802 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 03:12:43.924789278 +0000 UTC m=+56.160632824 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xkp8n" (UID: "472601ba-cee2-4f6e-ac53-a5606ef0469f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 03:12:43 crc kubenswrapper[5070]: I1213 03:12:43.443310 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-vx28k" podStartSLOduration=22.443293606 podStartE2EDuration="22.443293606s" podCreationTimestamp="2025-12-13 03:12:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 03:12:43.441424027 +0000 UTC m=+55.677267583" watchObservedRunningTime="2025-12-13 03:12:43.443293606 +0000 UTC m=+55.679137152" Dec 13 03:12:43 crc kubenswrapper[5070]: I1213 03:12:43.514756 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-lfs78" Dec 13 03:12:43 crc kubenswrapper[5070]: I1213 03:12:43.528398 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 03:12:43 crc kubenswrapper[5070]: I1213 03:12:43.528567 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rljcj\" (UniqueName: \"kubernetes.io/projected/665082fa-79a0-4ddb-83f8-0f45ad96de11-kube-api-access-rljcj\") pod \"community-operators-v8pzk\" (UID: \"665082fa-79a0-4ddb-83f8-0f45ad96de11\") " pod="openshift-marketplace/community-operators-v8pzk" Dec 13 03:12:43 crc kubenswrapper[5070]: I1213 03:12:43.528624 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/665082fa-79a0-4ddb-83f8-0f45ad96de11-catalog-content\") pod \"community-operators-v8pzk\" (UID: \"665082fa-79a0-4ddb-83f8-0f45ad96de11\") " pod="openshift-marketplace/community-operators-v8pzk" Dec 13 03:12:43 crc kubenswrapper[5070]: I1213 03:12:43.528697 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/665082fa-79a0-4ddb-83f8-0f45ad96de11-utilities\") pod \"community-operators-v8pzk\" (UID: \"665082fa-79a0-4ddb-83f8-0f45ad96de11\") " pod="openshift-marketplace/community-operators-v8pzk" Dec 13 03:12:43 crc kubenswrapper[5070]: I1213 03:12:43.529125 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/665082fa-79a0-4ddb-83f8-0f45ad96de11-utilities\") pod \"community-operators-v8pzk\" (UID: \"665082fa-79a0-4ddb-83f8-0f45ad96de11\") " pod="openshift-marketplace/community-operators-v8pzk" Dec 13 03:12:43 crc kubenswrapper[5070]: E1213 03:12:43.529192 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-12-13 03:12:44.029177553 +0000 UTC m=+56.265021099 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 03:12:43 crc kubenswrapper[5070]: I1213 03:12:43.529677 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/665082fa-79a0-4ddb-83f8-0f45ad96de11-catalog-content\") pod \"community-operators-v8pzk\" (UID: \"665082fa-79a0-4ddb-83f8-0f45ad96de11\") " pod="openshift-marketplace/community-operators-v8pzk" Dec 13 03:12:43 crc kubenswrapper[5070]: I1213 03:12:43.542951 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-j9rhx"] Dec 13 03:12:43 crc kubenswrapper[5070]: I1213 03:12:43.543946 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-j9rhx" Dec 13 03:12:43 crc kubenswrapper[5070]: I1213 03:12:43.545965 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-7hxcj" podStartSLOduration=22.545955606 podStartE2EDuration="22.545955606s" podCreationTimestamp="2025-12-13 03:12:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 03:12:43.528075005 +0000 UTC m=+55.763918551" watchObservedRunningTime="2025-12-13 03:12:43.545955606 +0000 UTC m=+55.781799152" Dec 13 03:12:43 crc kubenswrapper[5070]: I1213 03:12:43.564949 5070 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock" Dec 13 03:12:43 crc kubenswrapper[5070]: I1213 03:12:43.573272 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rljcj\" (UniqueName: \"kubernetes.io/projected/665082fa-79a0-4ddb-83f8-0f45ad96de11-kube-api-access-rljcj\") pod \"community-operators-v8pzk\" (UID: \"665082fa-79a0-4ddb-83f8-0f45ad96de11\") " pod="openshift-marketplace/community-operators-v8pzk" Dec 13 03:12:43 crc kubenswrapper[5070]: I1213 03:12:43.586703 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-2sbvn" podStartSLOduration=22.586688552 podStartE2EDuration="22.586688552s" podCreationTimestamp="2025-12-13 03:12:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 03:12:43.584843443 +0000 UTC m=+55.820686989" watchObservedRunningTime="2025-12-13 03:12:43.586688552 +0000 UTC m=+55.822532098" Dec 13 03:12:43 crc kubenswrapper[5070]: I1213 03:12:43.591224 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-j9rhx"] Dec 13 03:12:43 crc kubenswrapper[5070]: I1213 03:12:43.632206 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/b3c6dca7-36c0-4fbc-90b0-a4f91bd588f3-catalog-content\") pod \"certified-operators-j9rhx\" (UID: \"b3c6dca7-36c0-4fbc-90b0-a4f91bd588f3\") " pod="openshift-marketplace/certified-operators-j9rhx" Dec 13 03:12:43 crc kubenswrapper[5070]: I1213 03:12:43.632280 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xkp8n\" (UID: \"472601ba-cee2-4f6e-ac53-a5606ef0469f\") " pod="openshift-image-registry/image-registry-697d97f7c8-xkp8n" Dec 13 03:12:43 crc kubenswrapper[5070]: I1213 03:12:43.632302 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b3c6dca7-36c0-4fbc-90b0-a4f91bd588f3-utilities\") pod \"certified-operators-j9rhx\" (UID: \"b3c6dca7-36c0-4fbc-90b0-a4f91bd588f3\") " pod="openshift-marketplace/certified-operators-j9rhx" Dec 13 03:12:43 crc kubenswrapper[5070]: I1213 03:12:43.632394 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vbtvm\" (UniqueName: \"kubernetes.io/projected/b3c6dca7-36c0-4fbc-90b0-a4f91bd588f3-kube-api-access-vbtvm\") pod \"certified-operators-j9rhx\" (UID: \"b3c6dca7-36c0-4fbc-90b0-a4f91bd588f3\") " pod="openshift-marketplace/certified-operators-j9rhx" Dec 13 03:12:43 crc kubenswrapper[5070]: E1213 03:12:43.632743 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 03:12:44.132729187 +0000 UTC m=+56.368572733 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xkp8n" (UID: "472601ba-cee2-4f6e-ac53-a5606ef0469f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 03:12:43 crc kubenswrapper[5070]: I1213 03:12:43.646696 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-tmwbx" podStartSLOduration=22.646675985 podStartE2EDuration="22.646675985s" podCreationTimestamp="2025-12-13 03:12:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 03:12:43.645831113 +0000 UTC m=+55.881674679" watchObservedRunningTime="2025-12-13 03:12:43.646675985 +0000 UTC m=+55.882519531" Dec 13 03:12:43 crc kubenswrapper[5070]: I1213 03:12:43.684303 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-v8pzk" Dec 13 03:12:43 crc kubenswrapper[5070]: I1213 03:12:43.714171 5070 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-wm2b8 container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.22:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 13 03:12:43 crc kubenswrapper[5070]: I1213 03:12:43.714565 5070 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-wm2b8" podUID="7eb9db2d-0c78-4df0-ad39-1b3d6bc75d82" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.22:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Dec 13 03:12:43 crc kubenswrapper[5070]: I1213 03:12:43.733554 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 03:12:43 crc kubenswrapper[5070]: I1213 03:12:43.733853 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vbtvm\" (UniqueName: \"kubernetes.io/projected/b3c6dca7-36c0-4fbc-90b0-a4f91bd588f3-kube-api-access-vbtvm\") pod \"certified-operators-j9rhx\" (UID: \"b3c6dca7-36c0-4fbc-90b0-a4f91bd588f3\") " pod="openshift-marketplace/certified-operators-j9rhx" Dec 13 03:12:43 crc kubenswrapper[5070]: I1213 03:12:43.733914 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b3c6dca7-36c0-4fbc-90b0-a4f91bd588f3-catalog-content\") pod \"certified-operators-j9rhx\" (UID: \"b3c6dca7-36c0-4fbc-90b0-a4f91bd588f3\") " pod="openshift-marketplace/certified-operators-j9rhx" Dec 13 03:12:43 crc kubenswrapper[5070]: I1213 03:12:43.733968 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b3c6dca7-36c0-4fbc-90b0-a4f91bd588f3-utilities\") pod \"certified-operators-j9rhx\" (UID: \"b3c6dca7-36c0-4fbc-90b0-a4f91bd588f3\") " pod="openshift-marketplace/certified-operators-j9rhx" Dec 13 03:12:43 crc kubenswrapper[5070]: I1213 03:12:43.734473 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b3c6dca7-36c0-4fbc-90b0-a4f91bd588f3-utilities\") pod \"certified-operators-j9rhx\" (UID: \"b3c6dca7-36c0-4fbc-90b0-a4f91bd588f3\") " pod="openshift-marketplace/certified-operators-j9rhx" Dec 13 03:12:43 crc kubenswrapper[5070]: E1213 03:12:43.734563 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 03:12:44.234545794 +0000 UTC m=+56.470389350 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 03:12:43 crc kubenswrapper[5070]: I1213 03:12:43.735129 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b3c6dca7-36c0-4fbc-90b0-a4f91bd588f3-catalog-content\") pod \"certified-operators-j9rhx\" (UID: \"b3c6dca7-36c0-4fbc-90b0-a4f91bd588f3\") " pod="openshift-marketplace/certified-operators-j9rhx" Dec 13 03:12:43 crc kubenswrapper[5070]: I1213 03:12:43.787330 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vbtvm\" (UniqueName: \"kubernetes.io/projected/b3c6dca7-36c0-4fbc-90b0-a4f91bd588f3-kube-api-access-vbtvm\") pod \"certified-operators-j9rhx\" (UID: \"b3c6dca7-36c0-4fbc-90b0-a4f91bd588f3\") " pod="openshift-marketplace/certified-operators-j9rhx" Dec 13 03:12:43 crc kubenswrapper[5070]: I1213 03:12:43.828145 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-6fssx" event={"ID":"d0e149ba-a883-49ad-a21f-40b99873662b","Type":"ContainerStarted","Data":"1e8c550f22bd6d8a66fb08cdce2bc0390a2407bde5ea465444850e81689bab97"} Dec 13 03:12:43 crc kubenswrapper[5070]: I1213 03:12:43.835135 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xkp8n\" (UID: \"472601ba-cee2-4f6e-ac53-a5606ef0469f\") " pod="openshift-image-registry/image-registry-697d97f7c8-xkp8n" Dec 13 03:12:43 crc kubenswrapper[5070]: E1213 03:12:43.835607 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 03:12:44.335591292 +0000 UTC m=+56.571434838 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xkp8n" (UID: "472601ba-cee2-4f6e-ac53-a5606ef0469f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 03:12:43 crc kubenswrapper[5070]: I1213 03:12:43.868723 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-tmwbx" event={"ID":"86e29ac2-28cb-457d-8f96-5a60c3d535e3","Type":"ContainerStarted","Data":"b432cb0ee7c8599a15c01a98aa273bcee42b7bbc4acc7f7c660cc448bb57cc53"} Dec 13 03:12:43 crc kubenswrapper[5070]: I1213 03:12:43.887732 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-j9rhx" Dec 13 03:12:43 crc kubenswrapper[5070]: I1213 03:12:43.928210 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-7hxcj" event={"ID":"e55788c5-581c-4013-8c44-83a0b5f74b0a","Type":"ContainerStarted","Data":"452fef5011f5aeb06220e6ec0f2138ee54600189e6abd64e86dbde5cb98fa006"} Dec 13 03:12:43 crc kubenswrapper[5070]: I1213 03:12:43.930067 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-multus/cni-sysctl-allowlist-ds-9chqz" podUID="3dd269a0-64a2-4e74-91e2-7edbf5fd5574" containerName="kube-multus-additional-cni-plugins" containerID="cri-o://32f6ad13f5dda53ad7d2730403c2f5db6b8d345d1b69e68d26a7f536813b8595" gracePeriod=30 Dec 13 03:12:43 crc kubenswrapper[5070]: I1213 03:12:43.935810 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 03:12:43 crc kubenswrapper[5070]: E1213 03:12:43.935874 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 03:12:44.435858389 +0000 UTC m=+56.671701935 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 03:12:43 crc kubenswrapper[5070]: I1213 03:12:43.936836 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xkp8n\" (UID: \"472601ba-cee2-4f6e-ac53-a5606ef0469f\") " pod="openshift-image-registry/image-registry-697d97f7c8-xkp8n" Dec 13 03:12:43 crc kubenswrapper[5070]: E1213 03:12:43.938075 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 03:12:44.438063717 +0000 UTC m=+56.673907263 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xkp8n" (UID: "472601ba-cee2-4f6e-ac53-a5606ef0469f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 03:12:43 crc kubenswrapper[5070]: I1213 03:12:43.951285 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-vblq5" Dec 13 03:12:43 crc kubenswrapper[5070]: I1213 03:12:43.963967 5070 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2025-12-13T03:12:43.564971028Z","Handler":null,"Name":""} Dec 13 03:12:43 crc kubenswrapper[5070]: I1213 03:12:43.974249 5070 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0 Dec 13 03:12:43 crc kubenswrapper[5070]: I1213 03:12:43.974312 5070 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock Dec 13 03:12:44 crc kubenswrapper[5070]: I1213 03:12:44.037763 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 03:12:44 crc kubenswrapper[5070]: I1213 03:12:44.083951 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". 
PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 13 03:12:44 crc kubenswrapper[5070]: I1213 03:12:44.142382 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xkp8n\" (UID: \"472601ba-cee2-4f6e-ac53-a5606ef0469f\") " pod="openshift-image-registry/image-registry-697d97f7c8-xkp8n" Dec 13 03:12:44 crc kubenswrapper[5070]: I1213 03:12:44.198783 5070 patch_prober.go:28] interesting pod/router-default-5444994796-z8556 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 13 03:12:44 crc kubenswrapper[5070]: [-]has-synced failed: reason withheld Dec 13 03:12:44 crc kubenswrapper[5070]: [+]process-running ok Dec 13 03:12:44 crc kubenswrapper[5070]: healthz check failed Dec 13 03:12:44 crc kubenswrapper[5070]: I1213 03:12:44.198844 5070 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-z8556" podUID="bf452261-a66d-45bd-9155-55aa347f086a" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 13 03:12:44 crc kubenswrapper[5070]: I1213 03:12:44.201311 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes" Dec 13 03:12:44 crc kubenswrapper[5070]: I1213 03:12:44.325500 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-lfs78"] Dec 13 03:12:44 crc kubenswrapper[5070]: I1213 03:12:44.361526 5070 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Dec 13 03:12:44 crc kubenswrapper[5070]: I1213 03:12:44.361914 5070 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xkp8n\" (UID: \"472601ba-cee2-4f6e-ac53-a5606ef0469f\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-xkp8n" Dec 13 03:12:44 crc kubenswrapper[5070]: I1213 03:12:44.398674 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-wm2b8" Dec 13 03:12:44 crc kubenswrapper[5070]: I1213 03:12:44.488219 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-v8pzk"] Dec 13 03:12:44 crc kubenswrapper[5070]: I1213 03:12:44.634675 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xkp8n\" (UID: \"472601ba-cee2-4f6e-ac53-a5606ef0469f\") " pod="openshift-image-registry/image-registry-697d97f7c8-xkp8n" Dec 13 03:12:44 crc kubenswrapper[5070]: I1213 03:12:44.772199 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-rx625"] Dec 13 03:12:44 crc kubenswrapper[5070]: I1213 03:12:44.774509 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-j9rhx"] Dec 13 03:12:44 crc kubenswrapper[5070]: I1213 03:12:44.809388 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-xkp8n" Dec 13 03:12:44 crc kubenswrapper[5070]: I1213 03:12:44.901530 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-h5qcs"] Dec 13 03:12:44 crc kubenswrapper[5070]: I1213 03:12:44.902553 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-h5qcs" Dec 13 03:12:44 crc kubenswrapper[5070]: I1213 03:12:44.907863 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 13 03:12:44 crc kubenswrapper[5070]: I1213 03:12:44.915305 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-h5qcs"] Dec 13 03:12:44 crc kubenswrapper[5070]: I1213 03:12:44.955008 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lfs78" event={"ID":"2782a41c-ce36-4c0d-89c1-27b5e12e9b00","Type":"ContainerStarted","Data":"53e21aaf24da75639aa694e5c2a14fd1d30ffd671d9bec0d7f119ef9f7ae26c9"} Dec 13 03:12:44 crc kubenswrapper[5070]: I1213 03:12:44.955058 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lfs78" event={"ID":"2782a41c-ce36-4c0d-89c1-27b5e12e9b00","Type":"ContainerStarted","Data":"0b96368ad8167e8d78f1219c30e22e9471616706e154f0515c9158089933f583"} Dec 13 03:12:44 crc kubenswrapper[5070]: I1213 03:12:44.956825 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rx625" event={"ID":"9e4db140-cd43-41ac-a1d7-1913a66ba814","Type":"ContainerStarted","Data":"ae93555c1594239493e557b701b078462447b7572ca489a1fb9f03ca596a288e"} Dec 13 03:12:44 crc kubenswrapper[5070]: I1213 03:12:44.958479 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-j9rhx" event={"ID":"b3c6dca7-36c0-4fbc-90b0-a4f91bd588f3","Type":"ContainerStarted","Data":"d4f471a17488ca77df509771673b3bc38f09dcb13b2e927d2ef3936441a55354"} Dec 13 03:12:44 crc kubenswrapper[5070]: I1213 03:12:44.971758 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-v8pzk" event={"ID":"665082fa-79a0-4ddb-83f8-0f45ad96de11","Type":"ContainerStarted","Data":"6ec479fd8968db65f340b97d39704962d4aa1069cca4d0b041984eca85fe189b"} Dec 13 03:12:44 crc kubenswrapper[5070]: I1213 03:12:44.974470 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q4tlq\" (UniqueName: \"kubernetes.io/projected/a88af4a7-bdb6-4fca-b7fb-c6fe7f85bc4a-kube-api-access-q4tlq\") pod \"redhat-marketplace-h5qcs\" (UID: \"a88af4a7-bdb6-4fca-b7fb-c6fe7f85bc4a\") " pod="openshift-marketplace/redhat-marketplace-h5qcs" Dec 13 03:12:44 crc kubenswrapper[5070]: I1213 03:12:44.974536 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a88af4a7-bdb6-4fca-b7fb-c6fe7f85bc4a-catalog-content\") pod \"redhat-marketplace-h5qcs\" (UID: \"a88af4a7-bdb6-4fca-b7fb-c6fe7f85bc4a\") " pod="openshift-marketplace/redhat-marketplace-h5qcs" Dec 13 03:12:44 crc kubenswrapper[5070]: I1213 03:12:44.974575 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a88af4a7-bdb6-4fca-b7fb-c6fe7f85bc4a-utilities\") pod \"redhat-marketplace-h5qcs\" (UID: \"a88af4a7-bdb6-4fca-b7fb-c6fe7f85bc4a\") " pod="openshift-marketplace/redhat-marketplace-h5qcs" Dec 13 03:12:44 crc kubenswrapper[5070]: I1213 03:12:44.979528 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-6fssx" 
event={"ID":"d0e149ba-a883-49ad-a21f-40b99873662b","Type":"ContainerStarted","Data":"e16d619a0bc1cfaf65695ed6b25f3f57a095e3154f83273a82098c0145a66447"} Dec 13 03:12:45 crc kubenswrapper[5070]: I1213 03:12:45.079847 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a88af4a7-bdb6-4fca-b7fb-c6fe7f85bc4a-utilities\") pod \"redhat-marketplace-h5qcs\" (UID: \"a88af4a7-bdb6-4fca-b7fb-c6fe7f85bc4a\") " pod="openshift-marketplace/redhat-marketplace-h5qcs" Dec 13 03:12:45 crc kubenswrapper[5070]: I1213 03:12:45.080277 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q4tlq\" (UniqueName: \"kubernetes.io/projected/a88af4a7-bdb6-4fca-b7fb-c6fe7f85bc4a-kube-api-access-q4tlq\") pod \"redhat-marketplace-h5qcs\" (UID: \"a88af4a7-bdb6-4fca-b7fb-c6fe7f85bc4a\") " pod="openshift-marketplace/redhat-marketplace-h5qcs" Dec 13 03:12:45 crc kubenswrapper[5070]: I1213 03:12:45.080346 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a88af4a7-bdb6-4fca-b7fb-c6fe7f85bc4a-catalog-content\") pod \"redhat-marketplace-h5qcs\" (UID: \"a88af4a7-bdb6-4fca-b7fb-c6fe7f85bc4a\") " pod="openshift-marketplace/redhat-marketplace-h5qcs" Dec 13 03:12:45 crc kubenswrapper[5070]: I1213 03:12:45.080957 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a88af4a7-bdb6-4fca-b7fb-c6fe7f85bc4a-catalog-content\") pod \"redhat-marketplace-h5qcs\" (UID: \"a88af4a7-bdb6-4fca-b7fb-c6fe7f85bc4a\") " pod="openshift-marketplace/redhat-marketplace-h5qcs" Dec 13 03:12:45 crc kubenswrapper[5070]: I1213 03:12:45.084328 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a88af4a7-bdb6-4fca-b7fb-c6fe7f85bc4a-utilities\") pod \"redhat-marketplace-h5qcs\" (UID: \"a88af4a7-bdb6-4fca-b7fb-c6fe7f85bc4a\") " pod="openshift-marketplace/redhat-marketplace-h5qcs" Dec 13 03:12:45 crc kubenswrapper[5070]: I1213 03:12:45.139774 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q4tlq\" (UniqueName: \"kubernetes.io/projected/a88af4a7-bdb6-4fca-b7fb-c6fe7f85bc4a-kube-api-access-q4tlq\") pod \"redhat-marketplace-h5qcs\" (UID: \"a88af4a7-bdb6-4fca-b7fb-c6fe7f85bc4a\") " pod="openshift-marketplace/redhat-marketplace-h5qcs" Dec 13 03:12:45 crc kubenswrapper[5070]: I1213 03:12:45.194760 5070 patch_prober.go:28] interesting pod/router-default-5444994796-z8556 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 13 03:12:45 crc kubenswrapper[5070]: [-]has-synced failed: reason withheld Dec 13 03:12:45 crc kubenswrapper[5070]: [+]process-running ok Dec 13 03:12:45 crc kubenswrapper[5070]: healthz check failed Dec 13 03:12:45 crc kubenswrapper[5070]: I1213 03:12:45.194814 5070 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-z8556" podUID="bf452261-a66d-45bd-9155-55aa347f086a" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 13 03:12:45 crc kubenswrapper[5070]: I1213 03:12:45.233735 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-h5qcs" Dec 13 03:12:45 crc kubenswrapper[5070]: I1213 03:12:45.282503 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-wvqnd"] Dec 13 03:12:45 crc kubenswrapper[5070]: I1213 03:12:45.283492 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wvqnd" Dec 13 03:12:45 crc kubenswrapper[5070]: I1213 03:12:45.302413 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-wvqnd"] Dec 13 03:12:45 crc kubenswrapper[5070]: I1213 03:12:45.384259 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9fd8bef8-6268-421f-86dd-03a873a347ac-utilities\") pod \"redhat-marketplace-wvqnd\" (UID: \"9fd8bef8-6268-421f-86dd-03a873a347ac\") " pod="openshift-marketplace/redhat-marketplace-wvqnd" Dec 13 03:12:45 crc kubenswrapper[5070]: I1213 03:12:45.384395 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9fd8bef8-6268-421f-86dd-03a873a347ac-catalog-content\") pod \"redhat-marketplace-wvqnd\" (UID: \"9fd8bef8-6268-421f-86dd-03a873a347ac\") " pod="openshift-marketplace/redhat-marketplace-wvqnd" Dec 13 03:12:45 crc kubenswrapper[5070]: I1213 03:12:45.384425 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g8s4b\" (UniqueName: \"kubernetes.io/projected/9fd8bef8-6268-421f-86dd-03a873a347ac-kube-api-access-g8s4b\") pod \"redhat-marketplace-wvqnd\" (UID: \"9fd8bef8-6268-421f-86dd-03a873a347ac\") " pod="openshift-marketplace/redhat-marketplace-wvqnd" Dec 13 03:12:45 crc kubenswrapper[5070]: W1213 03:12:45.473251 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod472601ba_cee2_4f6e_ac53_a5606ef0469f.slice/crio-695c93ed41796d06f3f3e40bb3a524d05eafd286f3d72329296fd20a8c146ff8 WatchSource:0}: Error finding container 695c93ed41796d06f3f3e40bb3a524d05eafd286f3d72329296fd20a8c146ff8: Status 404 returned error can't find the container with id 695c93ed41796d06f3f3e40bb3a524d05eafd286f3d72329296fd20a8c146ff8 Dec 13 03:12:45 crc kubenswrapper[5070]: W1213 03:12:45.476935 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda88af4a7_bdb6_4fca_b7fb_c6fe7f85bc4a.slice/crio-012d9feee5c506951274f8ed0abda406596ce4f6a63d5f11f4549cc587827945 WatchSource:0}: Error finding container 012d9feee5c506951274f8ed0abda406596ce4f6a63d5f11f4549cc587827945: Status 404 returned error can't find the container with id 012d9feee5c506951274f8ed0abda406596ce4f6a63d5f11f4549cc587827945 Dec 13 03:12:45 crc kubenswrapper[5070]: I1213 03:12:45.479844 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-xkp8n"] Dec 13 03:12:45 crc kubenswrapper[5070]: I1213 03:12:45.482602 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-h5qcs"] Dec 13 03:12:45 crc kubenswrapper[5070]: I1213 03:12:45.485096 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9fd8bef8-6268-421f-86dd-03a873a347ac-utilities\") pod 
\"redhat-marketplace-wvqnd\" (UID: \"9fd8bef8-6268-421f-86dd-03a873a347ac\") " pod="openshift-marketplace/redhat-marketplace-wvqnd" Dec 13 03:12:45 crc kubenswrapper[5070]: I1213 03:12:45.485182 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9fd8bef8-6268-421f-86dd-03a873a347ac-catalog-content\") pod \"redhat-marketplace-wvqnd\" (UID: \"9fd8bef8-6268-421f-86dd-03a873a347ac\") " pod="openshift-marketplace/redhat-marketplace-wvqnd" Dec 13 03:12:45 crc kubenswrapper[5070]: I1213 03:12:45.485216 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g8s4b\" (UniqueName: \"kubernetes.io/projected/9fd8bef8-6268-421f-86dd-03a873a347ac-kube-api-access-g8s4b\") pod \"redhat-marketplace-wvqnd\" (UID: \"9fd8bef8-6268-421f-86dd-03a873a347ac\") " pod="openshift-marketplace/redhat-marketplace-wvqnd" Dec 13 03:12:45 crc kubenswrapper[5070]: I1213 03:12:45.485719 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9fd8bef8-6268-421f-86dd-03a873a347ac-catalog-content\") pod \"redhat-marketplace-wvqnd\" (UID: \"9fd8bef8-6268-421f-86dd-03a873a347ac\") " pod="openshift-marketplace/redhat-marketplace-wvqnd" Dec 13 03:12:45 crc kubenswrapper[5070]: I1213 03:12:45.486044 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9fd8bef8-6268-421f-86dd-03a873a347ac-utilities\") pod \"redhat-marketplace-wvqnd\" (UID: \"9fd8bef8-6268-421f-86dd-03a873a347ac\") " pod="openshift-marketplace/redhat-marketplace-wvqnd" Dec 13 03:12:45 crc kubenswrapper[5070]: I1213 03:12:45.505140 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g8s4b\" (UniqueName: \"kubernetes.io/projected/9fd8bef8-6268-421f-86dd-03a873a347ac-kube-api-access-g8s4b\") pod \"redhat-marketplace-wvqnd\" (UID: \"9fd8bef8-6268-421f-86dd-03a873a347ac\") " pod="openshift-marketplace/redhat-marketplace-wvqnd" Dec 13 03:12:45 crc kubenswrapper[5070]: I1213 03:12:45.667242 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wvqnd" Dec 13 03:12:45 crc kubenswrapper[5070]: I1213 03:12:45.892782 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-wvqnd"] Dec 13 03:12:45 crc kubenswrapper[5070]: W1213 03:12:45.901037 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9fd8bef8_6268_421f_86dd_03a873a347ac.slice/crio-5f5e1a962409c5412481aabfbbcd800acbad711d8c4699e474629f20578e1c13 WatchSource:0}: Error finding container 5f5e1a962409c5412481aabfbbcd800acbad711d8c4699e474629f20578e1c13: Status 404 returned error can't find the container with id 5f5e1a962409c5412481aabfbbcd800acbad711d8c4699e474629f20578e1c13 Dec 13 03:12:45 crc kubenswrapper[5070]: I1213 03:12:45.987109 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-xkp8n" event={"ID":"472601ba-cee2-4f6e-ac53-a5606ef0469f","Type":"ContainerStarted","Data":"72f7f5fc3896b38a456efbaf34e0d33b80ea467dddb8f144d01bff0876327ec6"} Dec 13 03:12:45 crc kubenswrapper[5070]: I1213 03:12:45.987388 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-xkp8n" event={"ID":"472601ba-cee2-4f6e-ac53-a5606ef0469f","Type":"ContainerStarted","Data":"695c93ed41796d06f3f3e40bb3a524d05eafd286f3d72329296fd20a8c146ff8"} Dec 13 03:12:45 crc kubenswrapper[5070]: I1213 03:12:45.987412 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-xkp8n" Dec 13 03:12:46 crc kubenswrapper[5070]: I1213 03:12:46.004756 5070 generic.go:334] "Generic (PLEG): container finished" podID="b3c6dca7-36c0-4fbc-90b0-a4f91bd588f3" containerID="55dd37681d604a71243c4bfbaa0ff718af4790c1ea5c7c0132dedff80adba549" exitCode=0 Dec 13 03:12:46 crc kubenswrapper[5070]: I1213 03:12:46.004818 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-j9rhx" event={"ID":"b3c6dca7-36c0-4fbc-90b0-a4f91bd588f3","Type":"ContainerDied","Data":"55dd37681d604a71243c4bfbaa0ff718af4790c1ea5c7c0132dedff80adba549"} Dec 13 03:12:46 crc kubenswrapper[5070]: I1213 03:12:46.006302 5070 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 13 03:12:46 crc kubenswrapper[5070]: I1213 03:12:46.009410 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-6fssx" event={"ID":"d0e149ba-a883-49ad-a21f-40b99873662b","Type":"ContainerStarted","Data":"5ccdeedaadeeb5983c24899c9be2a9c96896c616af82825288a0b4ccce075ce9"} Dec 13 03:12:46 crc kubenswrapper[5070]: I1213 03:12:46.011046 5070 generic.go:334] "Generic (PLEG): container finished" podID="2782a41c-ce36-4c0d-89c1-27b5e12e9b00" containerID="53e21aaf24da75639aa694e5c2a14fd1d30ffd671d9bec0d7f119ef9f7ae26c9" exitCode=0 Dec 13 03:12:46 crc kubenswrapper[5070]: I1213 03:12:46.011103 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lfs78" event={"ID":"2782a41c-ce36-4c0d-89c1-27b5e12e9b00","Type":"ContainerDied","Data":"53e21aaf24da75639aa694e5c2a14fd1d30ffd671d9bec0d7f119ef9f7ae26c9"} Dec 13 03:12:46 crc kubenswrapper[5070]: I1213 03:12:46.014249 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-xkp8n" podStartSLOduration=25.014227993 
podStartE2EDuration="25.014227993s" podCreationTimestamp="2025-12-13 03:12:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 03:12:46.013366731 +0000 UTC m=+58.249210297" watchObservedRunningTime="2025-12-13 03:12:46.014227993 +0000 UTC m=+58.250071539" Dec 13 03:12:46 crc kubenswrapper[5070]: I1213 03:12:46.021856 5070 generic.go:334] "Generic (PLEG): container finished" podID="a88af4a7-bdb6-4fca-b7fb-c6fe7f85bc4a" containerID="4cfbd5ef9e6ffe36d857f9180ce8d76defdd8df1f2a7c58603690717df84052a" exitCode=0 Dec 13 03:12:46 crc kubenswrapper[5070]: I1213 03:12:46.021912 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-h5qcs" event={"ID":"a88af4a7-bdb6-4fca-b7fb-c6fe7f85bc4a","Type":"ContainerDied","Data":"4cfbd5ef9e6ffe36d857f9180ce8d76defdd8df1f2a7c58603690717df84052a"} Dec 13 03:12:46 crc kubenswrapper[5070]: I1213 03:12:46.021966 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-h5qcs" event={"ID":"a88af4a7-bdb6-4fca-b7fb-c6fe7f85bc4a","Type":"ContainerStarted","Data":"012d9feee5c506951274f8ed0abda406596ce4f6a63d5f11f4549cc587827945"} Dec 13 03:12:46 crc kubenswrapper[5070]: I1213 03:12:46.026738 5070 generic.go:334] "Generic (PLEG): container finished" podID="9e4db140-cd43-41ac-a1d7-1913a66ba814" containerID="3d240eac20f24c2a6cf32bbfc86692f4294d999b1a49358ffc05f4f323b9186c" exitCode=0 Dec 13 03:12:46 crc kubenswrapper[5070]: I1213 03:12:46.026830 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rx625" event={"ID":"9e4db140-cd43-41ac-a1d7-1913a66ba814","Type":"ContainerDied","Data":"3d240eac20f24c2a6cf32bbfc86692f4294d999b1a49358ffc05f4f323b9186c"} Dec 13 03:12:46 crc kubenswrapper[5070]: I1213 03:12:46.029007 5070 generic.go:334] "Generic (PLEG): container finished" podID="665082fa-79a0-4ddb-83f8-0f45ad96de11" containerID="ebb4455c6948c1ab50193164599109b00da3724dc443422688906b4786f13e02" exitCode=0 Dec 13 03:12:46 crc kubenswrapper[5070]: I1213 03:12:46.029053 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-v8pzk" event={"ID":"665082fa-79a0-4ddb-83f8-0f45ad96de11","Type":"ContainerDied","Data":"ebb4455c6948c1ab50193164599109b00da3724dc443422688906b4786f13e02"} Dec 13 03:12:46 crc kubenswrapper[5070]: I1213 03:12:46.038169 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wvqnd" event={"ID":"9fd8bef8-6268-421f-86dd-03a873a347ac","Type":"ContainerStarted","Data":"5f5e1a962409c5412481aabfbbcd800acbad711d8c4699e474629f20578e1c13"} Dec 13 03:12:46 crc kubenswrapper[5070]: I1213 03:12:46.047030 5070 generic.go:334] "Generic (PLEG): container finished" podID="11d847a0-fe18-4f4a-8d65-b0f64b643c68" containerID="6b159dad678009b8c726600e93ff8fb8b01cd53bd0732a088e523eb2d241b277" exitCode=0 Dec 13 03:12:46 crc kubenswrapper[5070]: I1213 03:12:46.047359 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29426580-zzsjs" event={"ID":"11d847a0-fe18-4f4a-8d65-b0f64b643c68","Type":"ContainerDied","Data":"6b159dad678009b8c726600e93ff8fb8b01cd53bd0732a088e523eb2d241b277"} Dec 13 03:12:46 crc kubenswrapper[5070]: I1213 03:12:46.084369 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-6fssx" podStartSLOduration=13.084352284 
podStartE2EDuration="13.084352284s" podCreationTimestamp="2025-12-13 03:12:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 03:12:46.065560938 +0000 UTC m=+58.301404474" watchObservedRunningTime="2025-12-13 03:12:46.084352284 +0000 UTC m=+58.320195830" Dec 13 03:12:46 crc kubenswrapper[5070]: I1213 03:12:46.084933 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-q5mgn"] Dec 13 03:12:46 crc kubenswrapper[5070]: I1213 03:12:46.087330 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-q5mgn" Dec 13 03:12:46 crc kubenswrapper[5070]: I1213 03:12:46.089226 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 13 03:12:46 crc kubenswrapper[5070]: I1213 03:12:46.099481 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-q5mgn"] Dec 13 03:12:46 crc kubenswrapper[5070]: I1213 03:12:46.183764 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-z8556" Dec 13 03:12:46 crc kubenswrapper[5070]: I1213 03:12:46.185461 5070 patch_prober.go:28] interesting pod/router-default-5444994796-z8556 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 13 03:12:46 crc kubenswrapper[5070]: [-]has-synced failed: reason withheld Dec 13 03:12:46 crc kubenswrapper[5070]: [+]process-running ok Dec 13 03:12:46 crc kubenswrapper[5070]: healthz check failed Dec 13 03:12:46 crc kubenswrapper[5070]: I1213 03:12:46.185518 5070 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-z8556" podUID="bf452261-a66d-45bd-9155-55aa347f086a" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 13 03:12:46 crc kubenswrapper[5070]: I1213 03:12:46.195028 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/383cb754-9d42-43dd-9cb7-5238fec04ce5-catalog-content\") pod \"redhat-operators-q5mgn\" (UID: \"383cb754-9d42-43dd-9cb7-5238fec04ce5\") " pod="openshift-marketplace/redhat-operators-q5mgn" Dec 13 03:12:46 crc kubenswrapper[5070]: I1213 03:12:46.195066 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/383cb754-9d42-43dd-9cb7-5238fec04ce5-utilities\") pod \"redhat-operators-q5mgn\" (UID: \"383cb754-9d42-43dd-9cb7-5238fec04ce5\") " pod="openshift-marketplace/redhat-operators-q5mgn" Dec 13 03:12:46 crc kubenswrapper[5070]: I1213 03:12:46.195101 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hzx8v\" (UniqueName: \"kubernetes.io/projected/383cb754-9d42-43dd-9cb7-5238fec04ce5-kube-api-access-hzx8v\") pod \"redhat-operators-q5mgn\" (UID: \"383cb754-9d42-43dd-9cb7-5238fec04ce5\") " pod="openshift-marketplace/redhat-operators-q5mgn" Dec 13 03:12:46 crc kubenswrapper[5070]: I1213 03:12:46.205518 5070 patch_prober.go:28] interesting pod/downloads-7954f5f757-bjz8x container/download-server namespace/openshift-console: Readiness probe status=failure output="Get 
\"http://10.217.0.12:8080/\": dial tcp 10.217.0.12:8080: connect: connection refused" start-of-body= Dec 13 03:12:46 crc kubenswrapper[5070]: I1213 03:12:46.205553 5070 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-bjz8x" podUID="93fc381a-a5cc-4d02-bd2a-ba2898536d45" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.12:8080/\": dial tcp 10.217.0.12:8080: connect: connection refused" Dec 13 03:12:46 crc kubenswrapper[5070]: I1213 03:12:46.205726 5070 patch_prober.go:28] interesting pod/downloads-7954f5f757-bjz8x container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.12:8080/\": dial tcp 10.217.0.12:8080: connect: connection refused" start-of-body= Dec 13 03:12:46 crc kubenswrapper[5070]: I1213 03:12:46.205755 5070 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-bjz8x" podUID="93fc381a-a5cc-4d02-bd2a-ba2898536d45" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.12:8080/\": dial tcp 10.217.0.12:8080: connect: connection refused" Dec 13 03:12:46 crc kubenswrapper[5070]: I1213 03:12:46.296862 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/383cb754-9d42-43dd-9cb7-5238fec04ce5-catalog-content\") pod \"redhat-operators-q5mgn\" (UID: \"383cb754-9d42-43dd-9cb7-5238fec04ce5\") " pod="openshift-marketplace/redhat-operators-q5mgn" Dec 13 03:12:46 crc kubenswrapper[5070]: I1213 03:12:46.296905 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/383cb754-9d42-43dd-9cb7-5238fec04ce5-utilities\") pod \"redhat-operators-q5mgn\" (UID: \"383cb754-9d42-43dd-9cb7-5238fec04ce5\") " pod="openshift-marketplace/redhat-operators-q5mgn" Dec 13 03:12:46 crc kubenswrapper[5070]: I1213 03:12:46.296927 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hzx8v\" (UniqueName: \"kubernetes.io/projected/383cb754-9d42-43dd-9cb7-5238fec04ce5-kube-api-access-hzx8v\") pod \"redhat-operators-q5mgn\" (UID: \"383cb754-9d42-43dd-9cb7-5238fec04ce5\") " pod="openshift-marketplace/redhat-operators-q5mgn" Dec 13 03:12:46 crc kubenswrapper[5070]: I1213 03:12:46.298691 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/383cb754-9d42-43dd-9cb7-5238fec04ce5-catalog-content\") pod \"redhat-operators-q5mgn\" (UID: \"383cb754-9d42-43dd-9cb7-5238fec04ce5\") " pod="openshift-marketplace/redhat-operators-q5mgn" Dec 13 03:12:46 crc kubenswrapper[5070]: I1213 03:12:46.298750 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/383cb754-9d42-43dd-9cb7-5238fec04ce5-utilities\") pod \"redhat-operators-q5mgn\" (UID: \"383cb754-9d42-43dd-9cb7-5238fec04ce5\") " pod="openshift-marketplace/redhat-operators-q5mgn" Dec 13 03:12:46 crc kubenswrapper[5070]: I1213 03:12:46.323801 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-z6jvd" Dec 13 03:12:46 crc kubenswrapper[5070]: I1213 03:12:46.324063 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-z6jvd" Dec 13 03:12:46 crc kubenswrapper[5070]: I1213 03:12:46.333573 5070 patch_prober.go:28] 
interesting pod/console-f9d7485db-z6jvd container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.5:8443/health\": dial tcp 10.217.0.5:8443: connect: connection refused" start-of-body= Dec 13 03:12:46 crc kubenswrapper[5070]: I1213 03:12:46.333627 5070 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-z6jvd" podUID="7dd4f70a-3e48-4dcd-8e65-a9ee2430dfc4" containerName="console" probeResult="failure" output="Get \"https://10.217.0.5:8443/health\": dial tcp 10.217.0.5:8443: connect: connection refused" Dec 13 03:12:46 crc kubenswrapper[5070]: I1213 03:12:46.335220 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hzx8v\" (UniqueName: \"kubernetes.io/projected/383cb754-9d42-43dd-9cb7-5238fec04ce5-kube-api-access-hzx8v\") pod \"redhat-operators-q5mgn\" (UID: \"383cb754-9d42-43dd-9cb7-5238fec04ce5\") " pod="openshift-marketplace/redhat-operators-q5mgn" Dec 13 03:12:46 crc kubenswrapper[5070]: I1213 03:12:46.335797 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-s276q" Dec 13 03:12:46 crc kubenswrapper[5070]: I1213 03:12:46.335829 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-s276q" Dec 13 03:12:46 crc kubenswrapper[5070]: I1213 03:12:46.342971 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-s276q" Dec 13 03:12:46 crc kubenswrapper[5070]: I1213 03:12:46.349840 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 13 03:12:46 crc kubenswrapper[5070]: I1213 03:12:46.350503 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 13 03:12:46 crc kubenswrapper[5070]: I1213 03:12:46.353488 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n" Dec 13 03:12:46 crc kubenswrapper[5070]: I1213 03:12:46.353794 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt" Dec 13 03:12:46 crc kubenswrapper[5070]: I1213 03:12:46.367102 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 13 03:12:46 crc kubenswrapper[5070]: I1213 03:12:46.398311 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/55c96a7c-a7ce-4521-8309-05987e0a40d9-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"55c96a7c-a7ce-4521-8309-05987e0a40d9\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 13 03:12:46 crc kubenswrapper[5070]: I1213 03:12:46.398581 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/55c96a7c-a7ce-4521-8309-05987e0a40d9-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"55c96a7c-a7ce-4521-8309-05987e0a40d9\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 13 03:12:46 crc kubenswrapper[5070]: I1213 03:12:46.474997 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-q5mgn" Dec 13 03:12:46 crc kubenswrapper[5070]: I1213 03:12:46.496407 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-pd84r"] Dec 13 03:12:46 crc kubenswrapper[5070]: I1213 03:12:46.498337 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-pd84r" Dec 13 03:12:46 crc kubenswrapper[5070]: I1213 03:12:46.499680 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/55c96a7c-a7ce-4521-8309-05987e0a40d9-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"55c96a7c-a7ce-4521-8309-05987e0a40d9\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 13 03:12:46 crc kubenswrapper[5070]: I1213 03:12:46.499892 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/55c96a7c-a7ce-4521-8309-05987e0a40d9-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"55c96a7c-a7ce-4521-8309-05987e0a40d9\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 13 03:12:46 crc kubenswrapper[5070]: I1213 03:12:46.500510 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/55c96a7c-a7ce-4521-8309-05987e0a40d9-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"55c96a7c-a7ce-4521-8309-05987e0a40d9\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 13 03:12:46 crc kubenswrapper[5070]: I1213 03:12:46.529377 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/55c96a7c-a7ce-4521-8309-05987e0a40d9-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"55c96a7c-a7ce-4521-8309-05987e0a40d9\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 13 03:12:46 crc kubenswrapper[5070]: I1213 03:12:46.573005 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-pd84r"] Dec 13 03:12:46 crc kubenswrapper[5070]: I1213 03:12:46.602044 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9748a82a-6eee-4d58-b638-9def1ee4bb38-utilities\") pod \"redhat-operators-pd84r\" (UID: \"9748a82a-6eee-4d58-b638-9def1ee4bb38\") " pod="openshift-marketplace/redhat-operators-pd84r" Dec 13 03:12:46 crc kubenswrapper[5070]: I1213 03:12:46.602236 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9748a82a-6eee-4d58-b638-9def1ee4bb38-catalog-content\") pod \"redhat-operators-pd84r\" (UID: \"9748a82a-6eee-4d58-b638-9def1ee4bb38\") " pod="openshift-marketplace/redhat-operators-pd84r" Dec 13 03:12:46 crc kubenswrapper[5070]: I1213 03:12:46.602458 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sfsn6\" (UniqueName: \"kubernetes.io/projected/9748a82a-6eee-4d58-b638-9def1ee4bb38-kube-api-access-sfsn6\") pod \"redhat-operators-pd84r\" (UID: \"9748a82a-6eee-4d58-b638-9def1ee4bb38\") " pod="openshift-marketplace/redhat-operators-pd84r" Dec 13 03:12:46 crc kubenswrapper[5070]: I1213 03:12:46.681450 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 13 03:12:46 crc kubenswrapper[5070]: I1213 03:12:46.704528 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9748a82a-6eee-4d58-b638-9def1ee4bb38-utilities\") pod \"redhat-operators-pd84r\" (UID: \"9748a82a-6eee-4d58-b638-9def1ee4bb38\") " pod="openshift-marketplace/redhat-operators-pd84r" Dec 13 03:12:46 crc kubenswrapper[5070]: I1213 03:12:46.704611 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9748a82a-6eee-4d58-b638-9def1ee4bb38-catalog-content\") pod \"redhat-operators-pd84r\" (UID: \"9748a82a-6eee-4d58-b638-9def1ee4bb38\") " pod="openshift-marketplace/redhat-operators-pd84r" Dec 13 03:12:46 crc kubenswrapper[5070]: I1213 03:12:46.704700 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sfsn6\" (UniqueName: \"kubernetes.io/projected/9748a82a-6eee-4d58-b638-9def1ee4bb38-kube-api-access-sfsn6\") pod \"redhat-operators-pd84r\" (UID: \"9748a82a-6eee-4d58-b638-9def1ee4bb38\") " pod="openshift-marketplace/redhat-operators-pd84r" Dec 13 03:12:46 crc kubenswrapper[5070]: I1213 03:12:46.705537 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9748a82a-6eee-4d58-b638-9def1ee4bb38-utilities\") pod \"redhat-operators-pd84r\" (UID: \"9748a82a-6eee-4d58-b638-9def1ee4bb38\") " pod="openshift-marketplace/redhat-operators-pd84r" Dec 13 03:12:46 crc kubenswrapper[5070]: I1213 03:12:46.708964 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9748a82a-6eee-4d58-b638-9def1ee4bb38-catalog-content\") pod \"redhat-operators-pd84r\" (UID: \"9748a82a-6eee-4d58-b638-9def1ee4bb38\") " pod="openshift-marketplace/redhat-operators-pd84r" Dec 13 03:12:46 crc kubenswrapper[5070]: I1213 03:12:46.727918 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sfsn6\" (UniqueName: \"kubernetes.io/projected/9748a82a-6eee-4d58-b638-9def1ee4bb38-kube-api-access-sfsn6\") pod \"redhat-operators-pd84r\" (UID: \"9748a82a-6eee-4d58-b638-9def1ee4bb38\") " pod="openshift-marketplace/redhat-operators-pd84r" Dec 13 03:12:46 crc kubenswrapper[5070]: I1213 03:12:46.832634 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-q5mgn"] Dec 13 03:12:46 crc kubenswrapper[5070]: I1213 03:12:46.850619 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 13 03:12:46 crc kubenswrapper[5070]: I1213 03:12:46.852375 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 13 03:12:46 crc kubenswrapper[5070]: I1213 03:12:46.854635 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Dec 13 03:12:46 crc kubenswrapper[5070]: I1213 03:12:46.854914 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Dec 13 03:12:46 crc kubenswrapper[5070]: I1213 03:12:46.855753 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 13 03:12:46 crc kubenswrapper[5070]: I1213 03:12:46.870626 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-pd84r" Dec 13 03:12:46 crc kubenswrapper[5070]: W1213 03:12:46.888008 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod383cb754_9d42_43dd_9cb7_5238fec04ce5.slice/crio-f2d7938b95023d5d136f305cf6ba89da4a8d1b17da306d121ba950e6140e05ba WatchSource:0}: Error finding container f2d7938b95023d5d136f305cf6ba89da4a8d1b17da306d121ba950e6140e05ba: Status 404 returned error can't find the container with id f2d7938b95023d5d136f305cf6ba89da4a8d1b17da306d121ba950e6140e05ba Dec 13 03:12:46 crc kubenswrapper[5070]: I1213 03:12:46.908384 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/d75d0b70-8a24-4f73-93b3-190d058c7dc0-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"d75d0b70-8a24-4f73-93b3-190d058c7dc0\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 13 03:12:46 crc kubenswrapper[5070]: I1213 03:12:46.908460 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d75d0b70-8a24-4f73-93b3-190d058c7dc0-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"d75d0b70-8a24-4f73-93b3-190d058c7dc0\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 13 03:12:47 crc kubenswrapper[5070]: I1213 03:12:47.009318 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/d75d0b70-8a24-4f73-93b3-190d058c7dc0-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"d75d0b70-8a24-4f73-93b3-190d058c7dc0\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 13 03:12:47 crc kubenswrapper[5070]: I1213 03:12:47.009368 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d75d0b70-8a24-4f73-93b3-190d058c7dc0-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"d75d0b70-8a24-4f73-93b3-190d058c7dc0\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 13 03:12:47 crc kubenswrapper[5070]: I1213 03:12:47.009756 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/d75d0b70-8a24-4f73-93b3-190d058c7dc0-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"d75d0b70-8a24-4f73-93b3-190d058c7dc0\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 13 03:12:47 crc kubenswrapper[5070]: I1213 03:12:47.055102 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-q5mgn" 
event={"ID":"383cb754-9d42-43dd-9cb7-5238fec04ce5","Type":"ContainerStarted","Data":"f2d7938b95023d5d136f305cf6ba89da4a8d1b17da306d121ba950e6140e05ba"} Dec 13 03:12:47 crc kubenswrapper[5070]: I1213 03:12:47.058188 5070 generic.go:334] "Generic (PLEG): container finished" podID="9fd8bef8-6268-421f-86dd-03a873a347ac" containerID="9f0c23ae0bd181978b27c50199bd9280a8d04966134b1371f54b4a7a1439fd2e" exitCode=0 Dec 13 03:12:47 crc kubenswrapper[5070]: I1213 03:12:47.058242 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wvqnd" event={"ID":"9fd8bef8-6268-421f-86dd-03a873a347ac","Type":"ContainerDied","Data":"9f0c23ae0bd181978b27c50199bd9280a8d04966134b1371f54b4a7a1439fd2e"} Dec 13 03:12:47 crc kubenswrapper[5070]: I1213 03:12:47.060059 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d75d0b70-8a24-4f73-93b3-190d058c7dc0-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"d75d0b70-8a24-4f73-93b3-190d058c7dc0\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 13 03:12:47 crc kubenswrapper[5070]: I1213 03:12:47.066548 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-s276q" Dec 13 03:12:47 crc kubenswrapper[5070]: I1213 03:12:47.181701 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 13 03:12:47 crc kubenswrapper[5070]: I1213 03:12:47.182242 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-7hxcj" Dec 13 03:12:47 crc kubenswrapper[5070]: I1213 03:12:47.182278 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-7hxcj" Dec 13 03:12:47 crc kubenswrapper[5070]: I1213 03:12:47.185684 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 13 03:12:47 crc kubenswrapper[5070]: I1213 03:12:47.188090 5070 patch_prober.go:28] interesting pod/router-default-5444994796-z8556 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 13 03:12:47 crc kubenswrapper[5070]: [-]has-synced failed: reason withheld Dec 13 03:12:47 crc kubenswrapper[5070]: [+]process-running ok Dec 13 03:12:47 crc kubenswrapper[5070]: healthz check failed Dec 13 03:12:47 crc kubenswrapper[5070]: I1213 03:12:47.188125 5070 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-z8556" podUID="bf452261-a66d-45bd-9155-55aa347f086a" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 13 03:12:47 crc kubenswrapper[5070]: I1213 03:12:47.193698 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-7hxcj" Dec 13 03:12:47 crc kubenswrapper[5070]: E1213 03:12:47.208482 5070 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="32f6ad13f5dda53ad7d2730403c2f5db6b8d345d1b69e68d26a7f536813b8595" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 13 03:12:47 crc kubenswrapper[5070]: E1213 03:12:47.226332 5070 log.go:32] "ExecSync 
cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="32f6ad13f5dda53ad7d2730403c2f5db6b8d345d1b69e68d26a7f536813b8595" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 13 03:12:47 crc kubenswrapper[5070]: E1213 03:12:47.240633 5070 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="32f6ad13f5dda53ad7d2730403c2f5db6b8d345d1b69e68d26a7f536813b8595" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 13 03:12:47 crc kubenswrapper[5070]: E1213 03:12:47.240705 5070 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openshift-multus/cni-sysctl-allowlist-ds-9chqz" podUID="3dd269a0-64a2-4e74-91e2-7edbf5fd5574" containerName="kube-multus-additional-cni-plugins" Dec 13 03:12:47 crc kubenswrapper[5070]: I1213 03:12:47.384957 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-pd84r"] Dec 13 03:12:47 crc kubenswrapper[5070]: I1213 03:12:47.487059 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29426580-zzsjs" Dec 13 03:12:47 crc kubenswrapper[5070]: I1213 03:12:47.551170 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 13 03:12:47 crc kubenswrapper[5070]: W1213 03:12:47.605211 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-podd75d0b70_8a24_4f73_93b3_190d058c7dc0.slice/crio-2991a63c1dd752c979e8a43188d90ab361093396b6468e0b4aa566fd072fcb35 WatchSource:0}: Error finding container 2991a63c1dd752c979e8a43188d90ab361093396b6468e0b4aa566fd072fcb35: Status 404 returned error can't find the container with id 2991a63c1dd752c979e8a43188d90ab361093396b6468e0b4aa566fd072fcb35 Dec 13 03:12:47 crc kubenswrapper[5070]: I1213 03:12:47.622644 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/11d847a0-fe18-4f4a-8d65-b0f64b643c68-secret-volume\") pod \"11d847a0-fe18-4f4a-8d65-b0f64b643c68\" (UID: \"11d847a0-fe18-4f4a-8d65-b0f64b643c68\") " Dec 13 03:12:47 crc kubenswrapper[5070]: I1213 03:12:47.622787 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9lbn2\" (UniqueName: \"kubernetes.io/projected/11d847a0-fe18-4f4a-8d65-b0f64b643c68-kube-api-access-9lbn2\") pod \"11d847a0-fe18-4f4a-8d65-b0f64b643c68\" (UID: \"11d847a0-fe18-4f4a-8d65-b0f64b643c68\") " Dec 13 03:12:47 crc kubenswrapper[5070]: I1213 03:12:47.623017 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/11d847a0-fe18-4f4a-8d65-b0f64b643c68-config-volume\") pod \"11d847a0-fe18-4f4a-8d65-b0f64b643c68\" (UID: \"11d847a0-fe18-4f4a-8d65-b0f64b643c68\") " Dec 13 03:12:47 crc kubenswrapper[5070]: I1213 03:12:47.624141 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/11d847a0-fe18-4f4a-8d65-b0f64b643c68-config-volume" (OuterVolumeSpecName: "config-volume") pod "11d847a0-fe18-4f4a-8d65-b0f64b643c68" (UID: 
"11d847a0-fe18-4f4a-8d65-b0f64b643c68"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:12:47 crc kubenswrapper[5070]: I1213 03:12:47.638960 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/11d847a0-fe18-4f4a-8d65-b0f64b643c68-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "11d847a0-fe18-4f4a-8d65-b0f64b643c68" (UID: "11d847a0-fe18-4f4a-8d65-b0f64b643c68"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:12:47 crc kubenswrapper[5070]: I1213 03:12:47.640558 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/11d847a0-fe18-4f4a-8d65-b0f64b643c68-kube-api-access-9lbn2" (OuterVolumeSpecName: "kube-api-access-9lbn2") pod "11d847a0-fe18-4f4a-8d65-b0f64b643c68" (UID: "11d847a0-fe18-4f4a-8d65-b0f64b643c68"). InnerVolumeSpecName "kube-api-access-9lbn2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:12:47 crc kubenswrapper[5070]: I1213 03:12:47.724094 5070 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/11d847a0-fe18-4f4a-8d65-b0f64b643c68-config-volume\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:47 crc kubenswrapper[5070]: I1213 03:12:47.724126 5070 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/11d847a0-fe18-4f4a-8d65-b0f64b643c68-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:47 crc kubenswrapper[5070]: I1213 03:12:47.724136 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9lbn2\" (UniqueName: \"kubernetes.io/projected/11d847a0-fe18-4f4a-8d65-b0f64b643c68-kube-api-access-9lbn2\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:48 crc kubenswrapper[5070]: I1213 03:12:48.106658 5070 generic.go:334] "Generic (PLEG): container finished" podID="9748a82a-6eee-4d58-b638-9def1ee4bb38" containerID="a0bc254b578967186a1e1447d3a3b58f98033a2c2d0b1bc3da80b406206907b2" exitCode=0 Dec 13 03:12:48 crc kubenswrapper[5070]: I1213 03:12:48.107673 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pd84r" event={"ID":"9748a82a-6eee-4d58-b638-9def1ee4bb38","Type":"ContainerDied","Data":"a0bc254b578967186a1e1447d3a3b58f98033a2c2d0b1bc3da80b406206907b2"} Dec 13 03:12:48 crc kubenswrapper[5070]: I1213 03:12:48.107704 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pd84r" event={"ID":"9748a82a-6eee-4d58-b638-9def1ee4bb38","Type":"ContainerStarted","Data":"b83857770be10a01d83f60335a98a56d0e1fe94c2e17dcd14384c9c96835e9fb"} Dec 13 03:12:48 crc kubenswrapper[5070]: I1213 03:12:48.116982 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"d75d0b70-8a24-4f73-93b3-190d058c7dc0","Type":"ContainerStarted","Data":"79e370230f9e62ce0c5a2b4fbb5ba6cd3c84bc49aa11b7214a97b114faaf69ef"} Dec 13 03:12:48 crc kubenswrapper[5070]: I1213 03:12:48.117038 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"d75d0b70-8a24-4f73-93b3-190d058c7dc0","Type":"ContainerStarted","Data":"2991a63c1dd752c979e8a43188d90ab361093396b6468e0b4aa566fd072fcb35"} Dec 13 03:12:48 crc kubenswrapper[5070]: I1213 03:12:48.137657 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-operator-lifecycle-manager/collect-profiles-29426580-zzsjs" event={"ID":"11d847a0-fe18-4f4a-8d65-b0f64b643c68","Type":"ContainerDied","Data":"82374e85a92c06e0d4effc40a505245b1f37328321811e2921c4984b27ac2f87"} Dec 13 03:12:48 crc kubenswrapper[5070]: I1213 03:12:48.137696 5070 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="82374e85a92c06e0d4effc40a505245b1f37328321811e2921c4984b27ac2f87" Dec 13 03:12:48 crc kubenswrapper[5070]: I1213 03:12:48.137763 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29426580-zzsjs" Dec 13 03:12:48 crc kubenswrapper[5070]: I1213 03:12:48.153625 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-8-crc" podStartSLOduration=2.153606538 podStartE2EDuration="2.153606538s" podCreationTimestamp="2025-12-13 03:12:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 03:12:48.148900103 +0000 UTC m=+60.384743649" watchObservedRunningTime="2025-12-13 03:12:48.153606538 +0000 UTC m=+60.389450104" Dec 13 03:12:48 crc kubenswrapper[5070]: I1213 03:12:48.158512 5070 generic.go:334] "Generic (PLEG): container finished" podID="383cb754-9d42-43dd-9cb7-5238fec04ce5" containerID="6610b7acee3f3314defad3e5cb083b475ab6f6ef31129c80a3e3aca52d5528ce" exitCode=0 Dec 13 03:12:48 crc kubenswrapper[5070]: I1213 03:12:48.158588 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-q5mgn" event={"ID":"383cb754-9d42-43dd-9cb7-5238fec04ce5","Type":"ContainerDied","Data":"6610b7acee3f3314defad3e5cb083b475ab6f6ef31129c80a3e3aca52d5528ce"} Dec 13 03:12:48 crc kubenswrapper[5070]: I1213 03:12:48.216321 5070 patch_prober.go:28] interesting pod/router-default-5444994796-z8556 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 13 03:12:48 crc kubenswrapper[5070]: [-]has-synced failed: reason withheld Dec 13 03:12:48 crc kubenswrapper[5070]: [+]process-running ok Dec 13 03:12:48 crc kubenswrapper[5070]: healthz check failed Dec 13 03:12:48 crc kubenswrapper[5070]: I1213 03:12:48.216744 5070 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-z8556" podUID="bf452261-a66d-45bd-9155-55aa347f086a" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 13 03:12:48 crc kubenswrapper[5070]: I1213 03:12:48.236969 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"55c96a7c-a7ce-4521-8309-05987e0a40d9","Type":"ContainerStarted","Data":"1e777ccb05369c5f4165d65501a72e54e5be99876b5ca2b9123c90b21f4274ec"} Dec 13 03:12:48 crc kubenswrapper[5070]: I1213 03:12:48.237030 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"55c96a7c-a7ce-4521-8309-05987e0a40d9","Type":"ContainerStarted","Data":"7b6ca1e4b1014de3b02f674d49ca11cc7c9c15643b4809b90d7b167d8fc7a15e"} Dec 13 03:12:48 crc kubenswrapper[5070]: I1213 03:12:48.274675 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-7hxcj" Dec 13 03:12:48 crc kubenswrapper[5070]: I1213 
03:12:48.281576 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/revision-pruner-9-crc" podStartSLOduration=2.281557805 podStartE2EDuration="2.281557805s" podCreationTimestamp="2025-12-13 03:12:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 03:12:48.280853447 +0000 UTC m=+60.516696993" watchObservedRunningTime="2025-12-13 03:12:48.281557805 +0000 UTC m=+60.517401351" Dec 13 03:12:49 crc kubenswrapper[5070]: I1213 03:12:49.185176 5070 patch_prober.go:28] interesting pod/router-default-5444994796-z8556 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 13 03:12:49 crc kubenswrapper[5070]: [-]has-synced failed: reason withheld Dec 13 03:12:49 crc kubenswrapper[5070]: [+]process-running ok Dec 13 03:12:49 crc kubenswrapper[5070]: healthz check failed Dec 13 03:12:49 crc kubenswrapper[5070]: I1213 03:12:49.185679 5070 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-z8556" podUID="bf452261-a66d-45bd-9155-55aa347f086a" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 13 03:12:49 crc kubenswrapper[5070]: I1213 03:12:49.205034 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-n85jm" Dec 13 03:12:49 crc kubenswrapper[5070]: I1213 03:12:49.227764 5070 generic.go:334] "Generic (PLEG): container finished" podID="d75d0b70-8a24-4f73-93b3-190d058c7dc0" containerID="79e370230f9e62ce0c5a2b4fbb5ba6cd3c84bc49aa11b7214a97b114faaf69ef" exitCode=0 Dec 13 03:12:49 crc kubenswrapper[5070]: I1213 03:12:49.228742 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"d75d0b70-8a24-4f73-93b3-190d058c7dc0","Type":"ContainerDied","Data":"79e370230f9e62ce0c5a2b4fbb5ba6cd3c84bc49aa11b7214a97b114faaf69ef"} Dec 13 03:12:50 crc kubenswrapper[5070]: I1213 03:12:50.006564 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 13 03:12:50 crc kubenswrapper[5070]: I1213 03:12:50.187112 5070 patch_prober.go:28] interesting pod/router-default-5444994796-z8556 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 13 03:12:50 crc kubenswrapper[5070]: [-]has-synced failed: reason withheld Dec 13 03:12:50 crc kubenswrapper[5070]: [+]process-running ok Dec 13 03:12:50 crc kubenswrapper[5070]: healthz check failed Dec 13 03:12:50 crc kubenswrapper[5070]: I1213 03:12:50.187176 5070 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-z8556" podUID="bf452261-a66d-45bd-9155-55aa347f086a" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 13 03:12:50 crc kubenswrapper[5070]: I1213 03:12:50.239358 5070 generic.go:334] "Generic (PLEG): container finished" podID="55c96a7c-a7ce-4521-8309-05987e0a40d9" containerID="1e777ccb05369c5f4165d65501a72e54e5be99876b5ca2b9123c90b21f4274ec" exitCode=0 Dec 13 03:12:50 crc kubenswrapper[5070]: I1213 03:12:50.239481 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"55c96a7c-a7ce-4521-8309-05987e0a40d9","Type":"ContainerDied","Data":"1e777ccb05369c5f4165d65501a72e54e5be99876b5ca2b9123c90b21f4274ec"} Dec 13 03:12:50 crc kubenswrapper[5070]: I1213 03:12:50.521816 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 13 03:12:50 crc kubenswrapper[5070]: I1213 03:12:50.596254 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d75d0b70-8a24-4f73-93b3-190d058c7dc0-kube-api-access\") pod \"d75d0b70-8a24-4f73-93b3-190d058c7dc0\" (UID: \"d75d0b70-8a24-4f73-93b3-190d058c7dc0\") " Dec 13 03:12:50 crc kubenswrapper[5070]: I1213 03:12:50.596315 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/d75d0b70-8a24-4f73-93b3-190d058c7dc0-kubelet-dir\") pod \"d75d0b70-8a24-4f73-93b3-190d058c7dc0\" (UID: \"d75d0b70-8a24-4f73-93b3-190d058c7dc0\") " Dec 13 03:12:50 crc kubenswrapper[5070]: I1213 03:12:50.596756 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d75d0b70-8a24-4f73-93b3-190d058c7dc0-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "d75d0b70-8a24-4f73-93b3-190d058c7dc0" (UID: "d75d0b70-8a24-4f73-93b3-190d058c7dc0"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 13 03:12:50 crc kubenswrapper[5070]: I1213 03:12:50.617986 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d75d0b70-8a24-4f73-93b3-190d058c7dc0-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "d75d0b70-8a24-4f73-93b3-190d058c7dc0" (UID: "d75d0b70-8a24-4f73-93b3-190d058c7dc0"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:12:50 crc kubenswrapper[5070]: I1213 03:12:50.698783 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d75d0b70-8a24-4f73-93b3-190d058c7dc0-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:50 crc kubenswrapper[5070]: I1213 03:12:50.698821 5070 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/d75d0b70-8a24-4f73-93b3-190d058c7dc0-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:51 crc kubenswrapper[5070]: I1213 03:12:51.103557 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 03:12:51 crc kubenswrapper[5070]: I1213 03:12:51.103893 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 03:12:51 crc kubenswrapper[5070]: I1213 03:12:51.103952 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 03:12:51 crc kubenswrapper[5070]: I1213 03:12:51.103989 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 03:12:51 crc kubenswrapper[5070]: I1213 03:12:51.105368 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Dec 13 03:12:51 crc kubenswrapper[5070]: I1213 03:12:51.106255 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Dec 13 03:12:51 crc kubenswrapper[5070]: I1213 03:12:51.106578 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Dec 13 03:12:51 crc kubenswrapper[5070]: I1213 03:12:51.122510 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Dec 13 03:12:51 crc kubenswrapper[5070]: I1213 03:12:51.126258 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 
03:12:51 crc kubenswrapper[5070]: I1213 03:12:51.134161 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 03:12:51 crc kubenswrapper[5070]: I1213 03:12:51.139763 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 03:12:51 crc kubenswrapper[5070]: I1213 03:12:51.186575 5070 patch_prober.go:28] interesting pod/router-default-5444994796-z8556 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 13 03:12:51 crc kubenswrapper[5070]: [-]has-synced failed: reason withheld Dec 13 03:12:51 crc kubenswrapper[5070]: [+]process-running ok Dec 13 03:12:51 crc kubenswrapper[5070]: healthz check failed Dec 13 03:12:51 crc kubenswrapper[5070]: I1213 03:12:51.186647 5070 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-z8556" podUID="bf452261-a66d-45bd-9155-55aa347f086a" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 13 03:12:51 crc kubenswrapper[5070]: I1213 03:12:51.267215 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 13 03:12:51 crc kubenswrapper[5070]: I1213 03:12:51.267283 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"d75d0b70-8a24-4f73-93b3-190d058c7dc0","Type":"ContainerDied","Data":"2991a63c1dd752c979e8a43188d90ab361093396b6468e0b4aa566fd072fcb35"} Dec 13 03:12:51 crc kubenswrapper[5070]: I1213 03:12:51.267322 5070 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2991a63c1dd752c979e8a43188d90ab361093396b6468e0b4aa566fd072fcb35" Dec 13 03:12:51 crc kubenswrapper[5070]: I1213 03:12:51.399510 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 03:12:51 crc kubenswrapper[5070]: I1213 03:12:51.416968 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 03:12:51 crc kubenswrapper[5070]: I1213 03:12:51.693142 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 03:12:51 crc kubenswrapper[5070]: I1213 03:12:51.804088 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 13 03:12:51 crc kubenswrapper[5070]: I1213 03:12:51.822551 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/55c96a7c-a7ce-4521-8309-05987e0a40d9-kube-api-access\") pod \"55c96a7c-a7ce-4521-8309-05987e0a40d9\" (UID: \"55c96a7c-a7ce-4521-8309-05987e0a40d9\") " Dec 13 03:12:51 crc kubenswrapper[5070]: I1213 03:12:51.822610 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/55c96a7c-a7ce-4521-8309-05987e0a40d9-kubelet-dir\") pod \"55c96a7c-a7ce-4521-8309-05987e0a40d9\" (UID: \"55c96a7c-a7ce-4521-8309-05987e0a40d9\") " Dec 13 03:12:51 crc kubenswrapper[5070]: I1213 03:12:51.822836 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/55c96a7c-a7ce-4521-8309-05987e0a40d9-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "55c96a7c-a7ce-4521-8309-05987e0a40d9" (UID: "55c96a7c-a7ce-4521-8309-05987e0a40d9"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 13 03:12:51 crc kubenswrapper[5070]: I1213 03:12:51.830426 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/55c96a7c-a7ce-4521-8309-05987e0a40d9-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "55c96a7c-a7ce-4521-8309-05987e0a40d9" (UID: "55c96a7c-a7ce-4521-8309-05987e0a40d9"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:12:51 crc kubenswrapper[5070]: I1213 03:12:51.923389 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/55c96a7c-a7ce-4521-8309-05987e0a40d9-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:51 crc kubenswrapper[5070]: I1213 03:12:51.923417 5070 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/55c96a7c-a7ce-4521-8309-05987e0a40d9-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 13 03:12:52 crc kubenswrapper[5070]: I1213 03:12:51.998118 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 03:12:52 crc kubenswrapper[5070]: I1213 03:12:52.053953 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-7pmp8" Dec 13 03:12:52 crc kubenswrapper[5070]: I1213 03:12:52.186921 5070 patch_prober.go:28] interesting pod/router-default-5444994796-z8556 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 13 03:12:52 crc kubenswrapper[5070]: [-]has-synced failed: reason withheld Dec 13 03:12:52 crc kubenswrapper[5070]: [+]process-running ok Dec 13 03:12:52 crc kubenswrapper[5070]: healthz check failed Dec 13 03:12:52 crc kubenswrapper[5070]: I1213 03:12:52.186973 5070 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-z8556" podUID="bf452261-a66d-45bd-9155-55aa347f086a" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 13 03:12:52 crc kubenswrapper[5070]: I1213 03:12:52.320172 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"55c96a7c-a7ce-4521-8309-05987e0a40d9","Type":"ContainerDied","Data":"7b6ca1e4b1014de3b02f674d49ca11cc7c9c15643b4809b90d7b167d8fc7a15e"} Dec 13 03:12:52 crc kubenswrapper[5070]: I1213 03:12:52.320479 5070 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7b6ca1e4b1014de3b02f674d49ca11cc7c9c15643b4809b90d7b167d8fc7a15e" Dec 13 03:12:52 crc kubenswrapper[5070]: I1213 03:12:52.320548 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 13 03:12:52 crc kubenswrapper[5070]: I1213 03:12:52.344479 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"4b73de6cf35f5c4462b02502bef19411f7d3b469f2ead3fd6c5119acfa81ade6"} Dec 13 03:12:53 crc kubenswrapper[5070]: I1213 03:12:53.185396 5070 patch_prober.go:28] interesting pod/router-default-5444994796-z8556 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 13 03:12:53 crc kubenswrapper[5070]: [-]has-synced failed: reason withheld Dec 13 03:12:53 crc kubenswrapper[5070]: [+]process-running ok Dec 13 03:12:53 crc kubenswrapper[5070]: healthz check failed Dec 13 03:12:53 crc kubenswrapper[5070]: I1213 03:12:53.185850 5070 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-z8556" podUID="bf452261-a66d-45bd-9155-55aa347f086a" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 13 03:12:53 crc kubenswrapper[5070]: I1213 03:12:53.355102 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"aca15debf93fd1f32af2d840ee25fa8c9386853ebe74fd88e1b372c800274ee3"} Dec 13 03:12:53 crc kubenswrapper[5070]: I1213 03:12:53.356081 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 03:12:53 crc kubenswrapper[5070]: I1213 03:12:53.358917 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"e6bd49403d4548d4c7d3ef80aedf3ab1901ade4a4c96563e69e892ce6e9f34e2"} Dec 13 03:12:54 crc kubenswrapper[5070]: I1213 03:12:54.185789 5070 patch_prober.go:28] interesting pod/router-default-5444994796-z8556 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 13 03:12:54 crc kubenswrapper[5070]: [-]has-synced failed: reason withheld Dec 13 03:12:54 crc kubenswrapper[5070]: [+]process-running ok Dec 13 03:12:54 crc kubenswrapper[5070]: healthz check failed Dec 13 03:12:54 crc kubenswrapper[5070]: I1213 03:12:54.186071 5070 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-z8556" podUID="bf452261-a66d-45bd-9155-55aa347f086a" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 13 03:12:55 crc kubenswrapper[5070]: I1213 03:12:55.184764 5070 patch_prober.go:28] interesting pod/router-default-5444994796-z8556 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 13 03:12:55 crc kubenswrapper[5070]: [-]has-synced failed: reason withheld Dec 13 03:12:55 crc kubenswrapper[5070]: [+]process-running ok Dec 13 03:12:55 crc kubenswrapper[5070]: healthz check failed Dec 13 03:12:55 crc kubenswrapper[5070]: I1213 03:12:55.185103 5070 prober.go:107] 
"Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-z8556" podUID="bf452261-a66d-45bd-9155-55aa347f086a" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 13 03:12:56 crc kubenswrapper[5070]: I1213 03:12:56.184060 5070 patch_prober.go:28] interesting pod/router-default-5444994796-z8556 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 13 03:12:56 crc kubenswrapper[5070]: [-]has-synced failed: reason withheld Dec 13 03:12:56 crc kubenswrapper[5070]: [+]process-running ok Dec 13 03:12:56 crc kubenswrapper[5070]: healthz check failed Dec 13 03:12:56 crc kubenswrapper[5070]: I1213 03:12:56.184152 5070 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-z8556" podUID="bf452261-a66d-45bd-9155-55aa347f086a" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 13 03:12:56 crc kubenswrapper[5070]: I1213 03:12:56.212507 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-bjz8x" Dec 13 03:12:56 crc kubenswrapper[5070]: I1213 03:12:56.324481 5070 patch_prober.go:28] interesting pod/console-f9d7485db-z6jvd container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.5:8443/health\": dial tcp 10.217.0.5:8443: connect: connection refused" start-of-body= Dec 13 03:12:56 crc kubenswrapper[5070]: I1213 03:12:56.324723 5070 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-z6jvd" podUID="7dd4f70a-3e48-4dcd-8e65-a9ee2430dfc4" containerName="console" probeResult="failure" output="Get \"https://10.217.0.5:8443/health\": dial tcp 10.217.0.5:8443: connect: connection refused" Dec 13 03:12:57 crc kubenswrapper[5070]: I1213 03:12:57.185064 5070 patch_prober.go:28] interesting pod/router-default-5444994796-z8556 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 13 03:12:57 crc kubenswrapper[5070]: [+]has-synced ok Dec 13 03:12:57 crc kubenswrapper[5070]: [+]process-running ok Dec 13 03:12:57 crc kubenswrapper[5070]: healthz check failed Dec 13 03:12:57 crc kubenswrapper[5070]: I1213 03:12:57.185136 5070 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-z8556" podUID="bf452261-a66d-45bd-9155-55aa347f086a" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 13 03:12:57 crc kubenswrapper[5070]: I1213 03:12:57.189152 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Dec 13 03:12:57 crc kubenswrapper[5070]: E1213 03:12:57.204554 5070 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="32f6ad13f5dda53ad7d2730403c2f5db6b8d345d1b69e68d26a7f536813b8595" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 13 03:12:57 crc kubenswrapper[5070]: E1213 03:12:57.208046 5070 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , 
exit code -1" containerID="32f6ad13f5dda53ad7d2730403c2f5db6b8d345d1b69e68d26a7f536813b8595" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 13 03:12:57 crc kubenswrapper[5070]: E1213 03:12:57.209752 5070 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="32f6ad13f5dda53ad7d2730403c2f5db6b8d345d1b69e68d26a7f536813b8595" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 13 03:12:57 crc kubenswrapper[5070]: E1213 03:12:57.209869 5070 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openshift-multus/cni-sysctl-allowlist-ds-9chqz" podUID="3dd269a0-64a2-4e74-91e2-7edbf5fd5574" containerName="kube-multus-additional-cni-plugins" Dec 13 03:12:58 crc kubenswrapper[5070]: I1213 03:12:58.187993 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-z8556" Dec 13 03:12:58 crc kubenswrapper[5070]: I1213 03:12:58.191079 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-z8556" Dec 13 03:12:58 crc kubenswrapper[5070]: I1213 03:12:58.220416 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=1.2203970530000001 podStartE2EDuration="1.220397053s" podCreationTimestamp="2025-12-13 03:12:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 03:12:58.218759057 +0000 UTC m=+70.454602603" watchObservedRunningTime="2025-12-13 03:12:58.220397053 +0000 UTC m=+70.456240599" Dec 13 03:13:04 crc kubenswrapper[5070]: I1213 03:13:04.816736 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-xkp8n" Dec 13 03:13:06 crc kubenswrapper[5070]: I1213 03:13:06.335166 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-z6jvd" Dec 13 03:13:06 crc kubenswrapper[5070]: I1213 03:13:06.339234 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-z6jvd" Dec 13 03:13:07 crc kubenswrapper[5070]: E1213 03:13:07.205365 5070 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="32f6ad13f5dda53ad7d2730403c2f5db6b8d345d1b69e68d26a7f536813b8595" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 13 03:13:07 crc kubenswrapper[5070]: E1213 03:13:07.207720 5070 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="32f6ad13f5dda53ad7d2730403c2f5db6b8d345d1b69e68d26a7f536813b8595" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 13 03:13:07 crc kubenswrapper[5070]: E1213 03:13:07.209586 5070 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" 
containerID="32f6ad13f5dda53ad7d2730403c2f5db6b8d345d1b69e68d26a7f536813b8595" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 13 03:13:07 crc kubenswrapper[5070]: E1213 03:13:07.209638 5070 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openshift-multus/cni-sysctl-allowlist-ds-9chqz" podUID="3dd269a0-64a2-4e74-91e2-7edbf5fd5574" containerName="kube-multus-additional-cni-plugins" Dec 13 03:13:10 crc kubenswrapper[5070]: I1213 03:13:10.482240 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"a99d289669160f5b0d386f70030f105781160054bf7cc1a2a38e611b87792a3a"} Dec 13 03:13:14 crc kubenswrapper[5070]: E1213 03:13:14.097124 5070 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-podd75d0b70_8a24_4f73_93b3_190d058c7dc0.slice/crio-2991a63c1dd752c979e8a43188d90ab361093396b6468e0b4aa566fd072fcb35\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dd269a0_64a2_4e74_91e2_7edbf5fd5574.slice/crio-32f6ad13f5dda53ad7d2730403c2f5db6b8d345d1b69e68d26a7f536813b8595.scope\": RecentStats: unable to find data in memory cache]" Dec 13 03:13:17 crc kubenswrapper[5070]: E1213 03:13:17.203316 5070 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 32f6ad13f5dda53ad7d2730403c2f5db6b8d345d1b69e68d26a7f536813b8595 is running failed: container process not found" containerID="32f6ad13f5dda53ad7d2730403c2f5db6b8d345d1b69e68d26a7f536813b8595" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 13 03:13:17 crc kubenswrapper[5070]: E1213 03:13:17.204002 5070 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 32f6ad13f5dda53ad7d2730403c2f5db6b8d345d1b69e68d26a7f536813b8595 is running failed: container process not found" containerID="32f6ad13f5dda53ad7d2730403c2f5db6b8d345d1b69e68d26a7f536813b8595" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 13 03:13:17 crc kubenswrapper[5070]: E1213 03:13:17.204260 5070 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 32f6ad13f5dda53ad7d2730403c2f5db6b8d345d1b69e68d26a7f536813b8595 is running failed: container process not found" containerID="32f6ad13f5dda53ad7d2730403c2f5db6b8d345d1b69e68d26a7f536813b8595" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 13 03:13:17 crc kubenswrapper[5070]: E1213 03:13:17.204286 5070 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 32f6ad13f5dda53ad7d2730403c2f5db6b8d345d1b69e68d26a7f536813b8595 is running failed: container process not found" probeType="Readiness" pod="openshift-multus/cni-sysctl-allowlist-ds-9chqz" podUID="3dd269a0-64a2-4e74-91e2-7edbf5fd5574" containerName="kube-multus-additional-cni-plugins" Dec 13 03:13:17 crc kubenswrapper[5070]: I1213 03:13:17.527377 5070 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-multus_cni-sysctl-allowlist-ds-9chqz_3dd269a0-64a2-4e74-91e2-7edbf5fd5574/kube-multus-additional-cni-plugins/0.log" Dec 13 03:13:17 crc kubenswrapper[5070]: I1213 03:13:17.527436 5070 generic.go:334] "Generic (PLEG): container finished" podID="3dd269a0-64a2-4e74-91e2-7edbf5fd5574" containerID="32f6ad13f5dda53ad7d2730403c2f5db6b8d345d1b69e68d26a7f536813b8595" exitCode=137 Dec 13 03:13:17 crc kubenswrapper[5070]: I1213 03:13:17.527506 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/cni-sysctl-allowlist-ds-9chqz" event={"ID":"3dd269a0-64a2-4e74-91e2-7edbf5fd5574","Type":"ContainerDied","Data":"32f6ad13f5dda53ad7d2730403c2f5db6b8d345d1b69e68d26a7f536813b8595"} Dec 13 03:13:17 crc kubenswrapper[5070]: I1213 03:13:17.730069 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-grwvc" Dec 13 03:13:24 crc kubenswrapper[5070]: E1213 03:13:24.270256 5070 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-podd75d0b70_8a24_4f73_93b3_190d058c7dc0.slice/crio-2991a63c1dd752c979e8a43188d90ab361093396b6468e0b4aa566fd072fcb35\": RecentStats: unable to find data in memory cache]" Dec 13 03:13:25 crc kubenswrapper[5070]: I1213 03:13:25.062799 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 13 03:13:25 crc kubenswrapper[5070]: E1213 03:13:25.063146 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="55c96a7c-a7ce-4521-8309-05987e0a40d9" containerName="pruner" Dec 13 03:13:25 crc kubenswrapper[5070]: I1213 03:13:25.063170 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="55c96a7c-a7ce-4521-8309-05987e0a40d9" containerName="pruner" Dec 13 03:13:25 crc kubenswrapper[5070]: E1213 03:13:25.063197 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="11d847a0-fe18-4f4a-8d65-b0f64b643c68" containerName="collect-profiles" Dec 13 03:13:25 crc kubenswrapper[5070]: I1213 03:13:25.063209 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="11d847a0-fe18-4f4a-8d65-b0f64b643c68" containerName="collect-profiles" Dec 13 03:13:25 crc kubenswrapper[5070]: E1213 03:13:25.063239 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d75d0b70-8a24-4f73-93b3-190d058c7dc0" containerName="pruner" Dec 13 03:13:25 crc kubenswrapper[5070]: I1213 03:13:25.063251 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="d75d0b70-8a24-4f73-93b3-190d058c7dc0" containerName="pruner" Dec 13 03:13:25 crc kubenswrapper[5070]: I1213 03:13:25.063428 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="55c96a7c-a7ce-4521-8309-05987e0a40d9" containerName="pruner" Dec 13 03:13:25 crc kubenswrapper[5070]: I1213 03:13:25.063478 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="d75d0b70-8a24-4f73-93b3-190d058c7dc0" containerName="pruner" Dec 13 03:13:25 crc kubenswrapper[5070]: I1213 03:13:25.063504 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="11d847a0-fe18-4f4a-8d65-b0f64b643c68" containerName="collect-profiles" Dec 13 03:13:25 crc kubenswrapper[5070]: I1213 03:13:25.063960 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 13 03:13:25 crc kubenswrapper[5070]: I1213 03:13:25.064077 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 13 03:13:25 crc kubenswrapper[5070]: I1213 03:13:25.079183 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Dec 13 03:13:25 crc kubenswrapper[5070]: I1213 03:13:25.080107 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Dec 13 03:13:25 crc kubenswrapper[5070]: I1213 03:13:25.135281 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/612826dc-fa90-4c14-9fb8-3ea90be2fdb3-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"612826dc-fa90-4c14-9fb8-3ea90be2fdb3\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 13 03:13:25 crc kubenswrapper[5070]: I1213 03:13:25.135435 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/612826dc-fa90-4c14-9fb8-3ea90be2fdb3-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"612826dc-fa90-4c14-9fb8-3ea90be2fdb3\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 13 03:13:25 crc kubenswrapper[5070]: I1213 03:13:25.236679 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/612826dc-fa90-4c14-9fb8-3ea90be2fdb3-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"612826dc-fa90-4c14-9fb8-3ea90be2fdb3\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 13 03:13:25 crc kubenswrapper[5070]: I1213 03:13:25.236820 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/612826dc-fa90-4c14-9fb8-3ea90be2fdb3-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"612826dc-fa90-4c14-9fb8-3ea90be2fdb3\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 13 03:13:25 crc kubenswrapper[5070]: I1213 03:13:25.236934 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/612826dc-fa90-4c14-9fb8-3ea90be2fdb3-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"612826dc-fa90-4c14-9fb8-3ea90be2fdb3\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 13 03:13:25 crc kubenswrapper[5070]: I1213 03:13:25.259629 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/612826dc-fa90-4c14-9fb8-3ea90be2fdb3-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"612826dc-fa90-4c14-9fb8-3ea90be2fdb3\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 13 03:13:25 crc kubenswrapper[5070]: I1213 03:13:25.409946 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 13 03:13:25 crc kubenswrapper[5070]: E1213 03:13:25.877967 5070 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Dec 13 03:13:25 crc kubenswrapper[5070]: E1213 03:13:25.878426 5070 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-rljcj,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-v8pzk_openshift-marketplace(665082fa-79a0-4ddb-83f8-0f45ad96de11): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 13 03:13:25 crc kubenswrapper[5070]: E1213 03:13:25.879894 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-v8pzk" podUID="665082fa-79a0-4ddb-83f8-0f45ad96de11" Dec 13 03:13:25 crc kubenswrapper[5070]: E1213 03:13:25.886788 5070 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Dec 13 03:13:25 crc kubenswrapper[5070]: E1213 03:13:25.886933 5070 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-4kn69,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-rx625_openshift-marketplace(9e4db140-cd43-41ac-a1d7-1913a66ba814): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 13 03:13:25 crc kubenswrapper[5070]: E1213 03:13:25.888240 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-rx625" podUID="9e4db140-cd43-41ac-a1d7-1913a66ba814" Dec 13 03:13:27 crc kubenswrapper[5070]: E1213 03:13:27.052122 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-v8pzk" podUID="665082fa-79a0-4ddb-83f8-0f45ad96de11" Dec 13 03:13:27 crc kubenswrapper[5070]: E1213 03:13:27.052640 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-rx625" podUID="9e4db140-cd43-41ac-a1d7-1913a66ba814" Dec 13 03:13:27 crc kubenswrapper[5070]: I1213 03:13:27.099934 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_cni-sysctl-allowlist-ds-9chqz_3dd269a0-64a2-4e74-91e2-7edbf5fd5574/kube-multus-additional-cni-plugins/0.log" Dec 13 03:13:27 crc kubenswrapper[5070]: I1213 03:13:27.100021 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/cni-sysctl-allowlist-ds-9chqz" Dec 13 03:13:27 crc kubenswrapper[5070]: I1213 03:13:27.160649 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5xfr5\" (UniqueName: \"kubernetes.io/projected/3dd269a0-64a2-4e74-91e2-7edbf5fd5574-kube-api-access-5xfr5\") pod \"3dd269a0-64a2-4e74-91e2-7edbf5fd5574\" (UID: \"3dd269a0-64a2-4e74-91e2-7edbf5fd5574\") " Dec 13 03:13:27 crc kubenswrapper[5070]: I1213 03:13:27.160725 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/3dd269a0-64a2-4e74-91e2-7edbf5fd5574-tuning-conf-dir\") pod \"3dd269a0-64a2-4e74-91e2-7edbf5fd5574\" (UID: \"3dd269a0-64a2-4e74-91e2-7edbf5fd5574\") " Dec 13 03:13:27 crc kubenswrapper[5070]: I1213 03:13:27.160754 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/3dd269a0-64a2-4e74-91e2-7edbf5fd5574-cni-sysctl-allowlist\") pod \"3dd269a0-64a2-4e74-91e2-7edbf5fd5574\" (UID: \"3dd269a0-64a2-4e74-91e2-7edbf5fd5574\") " Dec 13 03:13:27 crc kubenswrapper[5070]: I1213 03:13:27.160913 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ready\" (UniqueName: \"kubernetes.io/empty-dir/3dd269a0-64a2-4e74-91e2-7edbf5fd5574-ready\") pod \"3dd269a0-64a2-4e74-91e2-7edbf5fd5574\" (UID: \"3dd269a0-64a2-4e74-91e2-7edbf5fd5574\") " Dec 13 03:13:27 crc kubenswrapper[5070]: I1213 03:13:27.161803 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3dd269a0-64a2-4e74-91e2-7edbf5fd5574-ready" (OuterVolumeSpecName: "ready") pod "3dd269a0-64a2-4e74-91e2-7edbf5fd5574" (UID: "3dd269a0-64a2-4e74-91e2-7edbf5fd5574"). InnerVolumeSpecName "ready". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 03:13:27 crc kubenswrapper[5070]: I1213 03:13:27.162512 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3dd269a0-64a2-4e74-91e2-7edbf5fd5574-tuning-conf-dir" (OuterVolumeSpecName: "tuning-conf-dir") pod "3dd269a0-64a2-4e74-91e2-7edbf5fd5574" (UID: "3dd269a0-64a2-4e74-91e2-7edbf5fd5574"). InnerVolumeSpecName "tuning-conf-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 13 03:13:27 crc kubenswrapper[5070]: I1213 03:13:27.163425 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3dd269a0-64a2-4e74-91e2-7edbf5fd5574-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "3dd269a0-64a2-4e74-91e2-7edbf5fd5574" (UID: "3dd269a0-64a2-4e74-91e2-7edbf5fd5574"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:13:27 crc kubenswrapper[5070]: I1213 03:13:27.169023 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3dd269a0-64a2-4e74-91e2-7edbf5fd5574-kube-api-access-5xfr5" (OuterVolumeSpecName: "kube-api-access-5xfr5") pod "3dd269a0-64a2-4e74-91e2-7edbf5fd5574" (UID: "3dd269a0-64a2-4e74-91e2-7edbf5fd5574"). InnerVolumeSpecName "kube-api-access-5xfr5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:13:27 crc kubenswrapper[5070]: E1213 03:13:27.204813 5070 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Dec 13 03:13:27 crc kubenswrapper[5070]: E1213 03:13:27.205264 5070 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-g8s4b,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-wvqnd_openshift-marketplace(9fd8bef8-6268-421f-86dd-03a873a347ac): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 13 03:13:27 crc kubenswrapper[5070]: E1213 03:13:27.206494 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-wvqnd" podUID="9fd8bef8-6268-421f-86dd-03a873a347ac" Dec 13 03:13:27 crc kubenswrapper[5070]: I1213 03:13:27.262993 5070 reconciler_common.go:293] "Volume detached for volume \"ready\" (UniqueName: \"kubernetes.io/empty-dir/3dd269a0-64a2-4e74-91e2-7edbf5fd5574-ready\") on node \"crc\" DevicePath \"\"" Dec 13 03:13:27 crc kubenswrapper[5070]: I1213 03:13:27.263036 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5xfr5\" (UniqueName: \"kubernetes.io/projected/3dd269a0-64a2-4e74-91e2-7edbf5fd5574-kube-api-access-5xfr5\") on node \"crc\" DevicePath \"\"" Dec 13 03:13:27 crc kubenswrapper[5070]: I1213 03:13:27.263050 5070 reconciler_common.go:293] "Volume detached for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/3dd269a0-64a2-4e74-91e2-7edbf5fd5574-tuning-conf-dir\") on node \"crc\" DevicePath \"\"" Dec 13 03:13:27 crc kubenswrapper[5070]: I1213 03:13:27.263061 5070 
reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/3dd269a0-64a2-4e74-91e2-7edbf5fd5574-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Dec 13 03:13:27 crc kubenswrapper[5070]: E1213 03:13:27.278554 5070 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Dec 13 03:13:27 crc kubenswrapper[5070]: E1213 03:13:27.278953 5070 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-q4tlq,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-h5qcs_openshift-marketplace(a88af4a7-bdb6-4fca-b7fb-c6fe7f85bc4a): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 13 03:13:27 crc kubenswrapper[5070]: E1213 03:13:27.280228 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-h5qcs" podUID="a88af4a7-bdb6-4fca-b7fb-c6fe7f85bc4a" Dec 13 03:13:27 crc kubenswrapper[5070]: I1213 03:13:27.590163 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_cni-sysctl-allowlist-ds-9chqz_3dd269a0-64a2-4e74-91e2-7edbf5fd5574/kube-multus-additional-cni-plugins/0.log" Dec 13 03:13:27 crc kubenswrapper[5070]: I1213 03:13:27.590984 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/cni-sysctl-allowlist-ds-9chqz" Dec 13 03:13:27 crc kubenswrapper[5070]: I1213 03:13:27.596288 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/cni-sysctl-allowlist-ds-9chqz" event={"ID":"3dd269a0-64a2-4e74-91e2-7edbf5fd5574","Type":"ContainerDied","Data":"c6a723979946f050d29a6a019370c09270739aad6d70447e61e4298aaec014ab"} Dec 13 03:13:27 crc kubenswrapper[5070]: I1213 03:13:27.596361 5070 scope.go:117] "RemoveContainer" containerID="32f6ad13f5dda53ad7d2730403c2f5db6b8d345d1b69e68d26a7f536813b8595" Dec 13 03:13:27 crc kubenswrapper[5070]: I1213 03:13:27.668861 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-multus/cni-sysctl-allowlist-ds-9chqz"] Dec 13 03:13:27 crc kubenswrapper[5070]: I1213 03:13:27.668934 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-multus/cni-sysctl-allowlist-ds-9chqz"] Dec 13 03:13:28 crc kubenswrapper[5070]: I1213 03:13:28.172104 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3dd269a0-64a2-4e74-91e2-7edbf5fd5574" path="/var/lib/kubelet/pods/3dd269a0-64a2-4e74-91e2-7edbf5fd5574/volumes" Dec 13 03:13:28 crc kubenswrapper[5070]: E1213 03:13:28.885913 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-h5qcs" podUID="a88af4a7-bdb6-4fca-b7fb-c6fe7f85bc4a" Dec 13 03:13:28 crc kubenswrapper[5070]: E1213 03:13:28.885976 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-wvqnd" podUID="9fd8bef8-6268-421f-86dd-03a873a347ac" Dec 13 03:13:29 crc kubenswrapper[5070]: E1213 03:13:29.239684 5070 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Dec 13 03:13:29 crc kubenswrapper[5070]: E1213 03:13:29.239899 5070 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-6csrd,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-lfs78_openshift-marketplace(2782a41c-ce36-4c0d-89c1-27b5e12e9b00): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 13 03:13:29 crc kubenswrapper[5070]: E1213 03:13:29.241093 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-lfs78" podUID="2782a41c-ce36-4c0d-89c1-27b5e12e9b00" Dec 13 03:13:29 crc kubenswrapper[5070]: E1213 03:13:29.441894 5070 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Dec 13 03:13:29 crc kubenswrapper[5070]: E1213 03:13:29.442260 5070 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-vbtvm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-j9rhx_openshift-marketplace(b3c6dca7-36c0-4fbc-90b0-a4f91bd588f3): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 13 03:13:29 crc kubenswrapper[5070]: E1213 03:13:29.443431 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-j9rhx" podUID="b3c6dca7-36c0-4fbc-90b0-a4f91bd588f3" Dec 13 03:13:30 crc kubenswrapper[5070]: I1213 03:13:30.432861 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 13 03:13:30 crc kubenswrapper[5070]: E1213 03:13:30.433368 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3dd269a0-64a2-4e74-91e2-7edbf5fd5574" containerName="kube-multus-additional-cni-plugins" Dec 13 03:13:30 crc kubenswrapper[5070]: I1213 03:13:30.433380 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="3dd269a0-64a2-4e74-91e2-7edbf5fd5574" containerName="kube-multus-additional-cni-plugins" Dec 13 03:13:30 crc kubenswrapper[5070]: I1213 03:13:30.433529 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="3dd269a0-64a2-4e74-91e2-7edbf5fd5574" containerName="kube-multus-additional-cni-plugins" Dec 13 03:13:30 crc kubenswrapper[5070]: I1213 03:13:30.433914 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 13 03:13:30 crc kubenswrapper[5070]: I1213 03:13:30.441829 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 13 03:13:30 crc kubenswrapper[5070]: I1213 03:13:30.505177 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/6c339fa5-cbb7-4493-ad68-da5f1a046330-var-lock\") pod \"installer-9-crc\" (UID: \"6c339fa5-cbb7-4493-ad68-da5f1a046330\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 13 03:13:30 crc kubenswrapper[5070]: I1213 03:13:30.505251 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/6c339fa5-cbb7-4493-ad68-da5f1a046330-kube-api-access\") pod \"installer-9-crc\" (UID: \"6c339fa5-cbb7-4493-ad68-da5f1a046330\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 13 03:13:30 crc kubenswrapper[5070]: I1213 03:13:30.505315 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/6c339fa5-cbb7-4493-ad68-da5f1a046330-kubelet-dir\") pod \"installer-9-crc\" (UID: \"6c339fa5-cbb7-4493-ad68-da5f1a046330\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 13 03:13:30 crc kubenswrapper[5070]: I1213 03:13:30.606545 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/6c339fa5-cbb7-4493-ad68-da5f1a046330-kubelet-dir\") pod \"installer-9-crc\" (UID: \"6c339fa5-cbb7-4493-ad68-da5f1a046330\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 13 03:13:30 crc kubenswrapper[5070]: I1213 03:13:30.606625 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/6c339fa5-cbb7-4493-ad68-da5f1a046330-var-lock\") pod \"installer-9-crc\" (UID: \"6c339fa5-cbb7-4493-ad68-da5f1a046330\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 13 03:13:30 crc kubenswrapper[5070]: I1213 03:13:30.606650 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/6c339fa5-cbb7-4493-ad68-da5f1a046330-kube-api-access\") pod \"installer-9-crc\" (UID: \"6c339fa5-cbb7-4493-ad68-da5f1a046330\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 13 03:13:30 crc kubenswrapper[5070]: I1213 03:13:30.606738 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/6c339fa5-cbb7-4493-ad68-da5f1a046330-kubelet-dir\") pod \"installer-9-crc\" (UID: \"6c339fa5-cbb7-4493-ad68-da5f1a046330\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 13 03:13:30 crc kubenswrapper[5070]: I1213 03:13:30.606811 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/6c339fa5-cbb7-4493-ad68-da5f1a046330-var-lock\") pod \"installer-9-crc\" (UID: \"6c339fa5-cbb7-4493-ad68-da5f1a046330\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 13 03:13:30 crc kubenswrapper[5070]: I1213 03:13:30.627492 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/6c339fa5-cbb7-4493-ad68-da5f1a046330-kube-api-access\") pod \"installer-9-crc\" (UID: 
\"6c339fa5-cbb7-4493-ad68-da5f1a046330\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 13 03:13:30 crc kubenswrapper[5070]: I1213 03:13:30.757501 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 13 03:13:31 crc kubenswrapper[5070]: I1213 03:13:31.937465 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 03:13:32 crc kubenswrapper[5070]: E1213 03:13:32.165821 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-lfs78" podUID="2782a41c-ce36-4c0d-89c1-27b5e12e9b00" Dec 13 03:13:32 crc kubenswrapper[5070]: E1213 03:13:32.166233 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-j9rhx" podUID="b3c6dca7-36c0-4fbc-90b0-a4f91bd588f3" Dec 13 03:13:32 crc kubenswrapper[5070]: I1213 03:13:32.624009 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"7c30f75204c5cf351e0360b8d66d7ddff971c96dd493d3ec75bdc8d86a1d0342"} Dec 13 03:13:33 crc kubenswrapper[5070]: I1213 03:13:33.635433 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"ed7071704a713d638f29716ae27f1ead1fd2e316dd6266285b426d5aa571529a"} Dec 13 03:13:33 crc kubenswrapper[5070]: I1213 03:13:33.761748 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 13 03:13:33 crc kubenswrapper[5070]: W1213 03:13:33.774121 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pod612826dc_fa90_4c14_9fb8_3ea90be2fdb3.slice/crio-692f9c878b71d9e31456276e8b767bfde593a96309053159f2a89b5228756017 WatchSource:0}: Error finding container 692f9c878b71d9e31456276e8b767bfde593a96309053159f2a89b5228756017: Status 404 returned error can't find the container with id 692f9c878b71d9e31456276e8b767bfde593a96309053159f2a89b5228756017 Dec 13 03:13:34 crc kubenswrapper[5070]: I1213 03:13:34.208127 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 13 03:13:34 crc kubenswrapper[5070]: W1213 03:13:34.216143 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pod6c339fa5_cbb7_4493_ad68_da5f1a046330.slice/crio-3526207975bb2416de32e21c5bcb81c4d24b9a0989e8b7a460f24b253c9639d0 WatchSource:0}: Error finding container 3526207975bb2416de32e21c5bcb81c4d24b9a0989e8b7a460f24b253c9639d0: Status 404 returned error can't find the container with id 3526207975bb2416de32e21c5bcb81c4d24b9a0989e8b7a460f24b253c9639d0 Dec 13 03:13:34 crc kubenswrapper[5070]: E1213 03:13:34.411105 5070 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: 
[\"/kubepods.slice/kubepods-podd75d0b70_8a24_4f73_93b3_190d058c7dc0.slice/crio-2991a63c1dd752c979e8a43188d90ab361093396b6468e0b4aa566fd072fcb35\": RecentStats: unable to find data in memory cache]" Dec 13 03:13:34 crc kubenswrapper[5070]: I1213 03:13:34.640016 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"612826dc-fa90-4c14-9fb8-3ea90be2fdb3","Type":"ContainerStarted","Data":"fad827ef5d3795ebeb2a2486c463afba571f6aee6c8919541531d9e38fb07828"} Dec 13 03:13:34 crc kubenswrapper[5070]: I1213 03:13:34.640062 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"612826dc-fa90-4c14-9fb8-3ea90be2fdb3","Type":"ContainerStarted","Data":"692f9c878b71d9e31456276e8b767bfde593a96309053159f2a89b5228756017"} Dec 13 03:13:34 crc kubenswrapper[5070]: I1213 03:13:34.643155 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"6c339fa5-cbb7-4493-ad68-da5f1a046330","Type":"ContainerStarted","Data":"3526207975bb2416de32e21c5bcb81c4d24b9a0989e8b7a460f24b253c9639d0"} Dec 13 03:13:34 crc kubenswrapper[5070]: I1213 03:13:34.652644 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-q5mgn" event={"ID":"383cb754-9d42-43dd-9cb7-5238fec04ce5","Type":"ContainerStarted","Data":"14071c5c7ef311133974965938aac0624c46cffb364f3a5c8c0ce0bd56962461"} Dec 13 03:13:34 crc kubenswrapper[5070]: I1213 03:13:34.661135 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pd84r" event={"ID":"9748a82a-6eee-4d58-b638-9def1ee4bb38","Type":"ContainerStarted","Data":"c7cf07eb71b9ff54e7fc291a00d3d75ca7b0cf1536d228fc85908d4bc68a025a"} Dec 13 03:13:34 crc kubenswrapper[5070]: I1213 03:13:34.662367 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-9-crc" podStartSLOduration=9.662350247 podStartE2EDuration="9.662350247s" podCreationTimestamp="2025-12-13 03:13:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 03:13:34.661631856 +0000 UTC m=+106.897475482" watchObservedRunningTime="2025-12-13 03:13:34.662350247 +0000 UTC m=+106.898193793" Dec 13 03:13:35 crc kubenswrapper[5070]: I1213 03:13:35.675558 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"6c339fa5-cbb7-4493-ad68-da5f1a046330","Type":"ContainerStarted","Data":"959c92940046c89f728cc9017efc8f50f3f93057dcfd5f6f481c039c589d477a"} Dec 13 03:13:35 crc kubenswrapper[5070]: I1213 03:13:35.679107 5070 generic.go:334] "Generic (PLEG): container finished" podID="383cb754-9d42-43dd-9cb7-5238fec04ce5" containerID="14071c5c7ef311133974965938aac0624c46cffb364f3a5c8c0ce0bd56962461" exitCode=0 Dec 13 03:13:35 crc kubenswrapper[5070]: I1213 03:13:35.679192 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-q5mgn" event={"ID":"383cb754-9d42-43dd-9cb7-5238fec04ce5","Type":"ContainerDied","Data":"14071c5c7ef311133974965938aac0624c46cffb364f3a5c8c0ce0bd56962461"} Dec 13 03:13:35 crc kubenswrapper[5070]: I1213 03:13:35.685596 5070 generic.go:334] "Generic (PLEG): container finished" podID="9748a82a-6eee-4d58-b638-9def1ee4bb38" containerID="c7cf07eb71b9ff54e7fc291a00d3d75ca7b0cf1536d228fc85908d4bc68a025a" exitCode=0 Dec 13 03:13:35 crc 
kubenswrapper[5070]: I1213 03:13:35.685681 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pd84r" event={"ID":"9748a82a-6eee-4d58-b638-9def1ee4bb38","Type":"ContainerDied","Data":"c7cf07eb71b9ff54e7fc291a00d3d75ca7b0cf1536d228fc85908d4bc68a025a"} Dec 13 03:13:35 crc kubenswrapper[5070]: I1213 03:13:35.692359 5070 generic.go:334] "Generic (PLEG): container finished" podID="612826dc-fa90-4c14-9fb8-3ea90be2fdb3" containerID="fad827ef5d3795ebeb2a2486c463afba571f6aee6c8919541531d9e38fb07828" exitCode=0 Dec 13 03:13:35 crc kubenswrapper[5070]: I1213 03:13:35.692405 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"612826dc-fa90-4c14-9fb8-3ea90be2fdb3","Type":"ContainerDied","Data":"fad827ef5d3795ebeb2a2486c463afba571f6aee6c8919541531d9e38fb07828"} Dec 13 03:13:35 crc kubenswrapper[5070]: I1213 03:13:35.702078 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/installer-9-crc" podStartSLOduration=5.702057181 podStartE2EDuration="5.702057181s" podCreationTimestamp="2025-12-13 03:13:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 03:13:35.701362211 +0000 UTC m=+107.937205767" watchObservedRunningTime="2025-12-13 03:13:35.702057181 +0000 UTC m=+107.937900757" Dec 13 03:13:36 crc kubenswrapper[5070]: I1213 03:13:36.703686 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-q5mgn" event={"ID":"383cb754-9d42-43dd-9cb7-5238fec04ce5","Type":"ContainerStarted","Data":"1717ee3a3fc150d71953d6f734511d584df1e81d94146f3e66a682c2a16fb0f4"} Dec 13 03:13:36 crc kubenswrapper[5070]: I1213 03:13:36.706675 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pd84r" event={"ID":"9748a82a-6eee-4d58-b638-9def1ee4bb38","Type":"ContainerStarted","Data":"09dc471285e913ad3dac0aa48f68bec5582a2013f464d441d43453c0eb9b035f"} Dec 13 03:13:36 crc kubenswrapper[5070]: I1213 03:13:36.726517 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-q5mgn" podStartSLOduration=2.726050049 podStartE2EDuration="50.72649706s" podCreationTimestamp="2025-12-13 03:12:46 +0000 UTC" firstStartedPulling="2025-12-13 03:12:48.215415149 +0000 UTC m=+60.451258695" lastFinishedPulling="2025-12-13 03:13:36.21586215 +0000 UTC m=+108.451705706" observedRunningTime="2025-12-13 03:13:36.721362554 +0000 UTC m=+108.957206100" watchObservedRunningTime="2025-12-13 03:13:36.72649706 +0000 UTC m=+108.962340616" Dec 13 03:13:36 crc kubenswrapper[5070]: I1213 03:13:36.742568 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-pd84r" podStartSLOduration=2.631126327 podStartE2EDuration="50.742547767s" podCreationTimestamp="2025-12-13 03:12:46 +0000 UTC" firstStartedPulling="2025-12-13 03:12:48.123325088 +0000 UTC m=+60.359168634" lastFinishedPulling="2025-12-13 03:13:36.234746518 +0000 UTC m=+108.470590074" observedRunningTime="2025-12-13 03:13:36.739884481 +0000 UTC m=+108.975728037" watchObservedRunningTime="2025-12-13 03:13:36.742547767 +0000 UTC m=+108.978391323" Dec 13 03:13:36 crc kubenswrapper[5070]: I1213 03:13:36.871669 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-pd84r" Dec 13 03:13:36 crc 
kubenswrapper[5070]: I1213 03:13:36.873307 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-pd84r" Dec 13 03:13:37 crc kubenswrapper[5070]: I1213 03:13:37.015945 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 13 03:13:37 crc kubenswrapper[5070]: I1213 03:13:37.182122 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"] Dec 13 03:13:37 crc kubenswrapper[5070]: I1213 03:13:37.205859 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/612826dc-fa90-4c14-9fb8-3ea90be2fdb3-kube-api-access\") pod \"612826dc-fa90-4c14-9fb8-3ea90be2fdb3\" (UID: \"612826dc-fa90-4c14-9fb8-3ea90be2fdb3\") " Dec 13 03:13:37 crc kubenswrapper[5070]: I1213 03:13:37.205972 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/612826dc-fa90-4c14-9fb8-3ea90be2fdb3-kubelet-dir\") pod \"612826dc-fa90-4c14-9fb8-3ea90be2fdb3\" (UID: \"612826dc-fa90-4c14-9fb8-3ea90be2fdb3\") " Dec 13 03:13:37 crc kubenswrapper[5070]: I1213 03:13:37.206087 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/612826dc-fa90-4c14-9fb8-3ea90be2fdb3-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "612826dc-fa90-4c14-9fb8-3ea90be2fdb3" (UID: "612826dc-fa90-4c14-9fb8-3ea90be2fdb3"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 13 03:13:37 crc kubenswrapper[5070]: I1213 03:13:37.206218 5070 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/612826dc-fa90-4c14-9fb8-3ea90be2fdb3-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 13 03:13:37 crc kubenswrapper[5070]: I1213 03:13:37.212709 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/612826dc-fa90-4c14-9fb8-3ea90be2fdb3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "612826dc-fa90-4c14-9fb8-3ea90be2fdb3" (UID: "612826dc-fa90-4c14-9fb8-3ea90be2fdb3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:13:37 crc kubenswrapper[5070]: I1213 03:13:37.307175 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/612826dc-fa90-4c14-9fb8-3ea90be2fdb3-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 13 03:13:37 crc kubenswrapper[5070]: I1213 03:13:37.716085 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 13 03:13:37 crc kubenswrapper[5070]: I1213 03:13:37.717519 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"612826dc-fa90-4c14-9fb8-3ea90be2fdb3","Type":"ContainerDied","Data":"692f9c878b71d9e31456276e8b767bfde593a96309053159f2a89b5228756017"} Dec 13 03:13:37 crc kubenswrapper[5070]: I1213 03:13:37.717690 5070 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="692f9c878b71d9e31456276e8b767bfde593a96309053159f2a89b5228756017" Dec 13 03:13:37 crc kubenswrapper[5070]: I1213 03:13:37.765773 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=0.765755482 podStartE2EDuration="765.755482ms" podCreationTimestamp="2025-12-13 03:13:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 03:13:37.76151834 +0000 UTC m=+109.997361926" watchObservedRunningTime="2025-12-13 03:13:37.765755482 +0000 UTC m=+110.001599028" Dec 13 03:13:37 crc kubenswrapper[5070]: I1213 03:13:37.954079 5070 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-pd84r" podUID="9748a82a-6eee-4d58-b638-9def1ee4bb38" containerName="registry-server" probeResult="failure" output=< Dec 13 03:13:37 crc kubenswrapper[5070]: timeout: failed to connect service ":50051" within 1s Dec 13 03:13:37 crc kubenswrapper[5070]: > Dec 13 03:13:41 crc kubenswrapper[5070]: I1213 03:13:41.733601 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-v8pzk" event={"ID":"665082fa-79a0-4ddb-83f8-0f45ad96de11","Type":"ContainerStarted","Data":"51763391905d2db445461a85f8e81929f3ae2f63a0afbe0d79c4fc7bd9dfaee2"} Dec 13 03:13:41 crc kubenswrapper[5070]: I1213 03:13:41.735558 5070 generic.go:334] "Generic (PLEG): container finished" podID="a88af4a7-bdb6-4fca-b7fb-c6fe7f85bc4a" containerID="8e500363a23befb5fafc4d82fc0e2b06f99d85adee5b13f24244af4b208b5293" exitCode=0 Dec 13 03:13:41 crc kubenswrapper[5070]: I1213 03:13:41.735600 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-h5qcs" event={"ID":"a88af4a7-bdb6-4fca-b7fb-c6fe7f85bc4a","Type":"ContainerDied","Data":"8e500363a23befb5fafc4d82fc0e2b06f99d85adee5b13f24244af4b208b5293"} Dec 13 03:13:42 crc kubenswrapper[5070]: I1213 03:13:42.754511 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rx625" event={"ID":"9e4db140-cd43-41ac-a1d7-1913a66ba814","Type":"ContainerStarted","Data":"97215289fe425d5ac8c5b682aace07bc3e2391bbaf4be1dcedc77a8aa5eb51af"} Dec 13 03:13:42 crc kubenswrapper[5070]: I1213 03:13:42.757146 5070 generic.go:334] "Generic (PLEG): container finished" podID="665082fa-79a0-4ddb-83f8-0f45ad96de11" containerID="51763391905d2db445461a85f8e81929f3ae2f63a0afbe0d79c4fc7bd9dfaee2" exitCode=0 Dec 13 03:13:42 crc kubenswrapper[5070]: I1213 03:13:42.757216 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-v8pzk" event={"ID":"665082fa-79a0-4ddb-83f8-0f45ad96de11","Type":"ContainerDied","Data":"51763391905d2db445461a85f8e81929f3ae2f63a0afbe0d79c4fc7bd9dfaee2"} Dec 13 03:13:42 crc kubenswrapper[5070]: I1213 03:13:42.763581 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/redhat-marketplace-h5qcs" event={"ID":"a88af4a7-bdb6-4fca-b7fb-c6fe7f85bc4a","Type":"ContainerStarted","Data":"925341df345ffead9caedd9d0bfb54d060debde6b0770aae06c516f364e657bc"} Dec 13 03:13:42 crc kubenswrapper[5070]: I1213 03:13:42.788882 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-h5qcs" podStartSLOduration=2.439516251 podStartE2EDuration="58.788861184s" podCreationTimestamp="2025-12-13 03:12:44 +0000 UTC" firstStartedPulling="2025-12-13 03:12:46.023642681 +0000 UTC m=+58.259486227" lastFinishedPulling="2025-12-13 03:13:42.372987614 +0000 UTC m=+114.608831160" observedRunningTime="2025-12-13 03:13:42.78591219 +0000 UTC m=+115.021755736" watchObservedRunningTime="2025-12-13 03:13:42.788861184 +0000 UTC m=+115.024704740" Dec 13 03:13:43 crc kubenswrapper[5070]: I1213 03:13:43.772984 5070 generic.go:334] "Generic (PLEG): container finished" podID="9e4db140-cd43-41ac-a1d7-1913a66ba814" containerID="97215289fe425d5ac8c5b682aace07bc3e2391bbaf4be1dcedc77a8aa5eb51af" exitCode=0 Dec 13 03:13:43 crc kubenswrapper[5070]: I1213 03:13:43.773018 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rx625" event={"ID":"9e4db140-cd43-41ac-a1d7-1913a66ba814","Type":"ContainerDied","Data":"97215289fe425d5ac8c5b682aace07bc3e2391bbaf4be1dcedc77a8aa5eb51af"} Dec 13 03:13:44 crc kubenswrapper[5070]: E1213 03:13:44.564265 5070 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-podd75d0b70_8a24_4f73_93b3_190d058c7dc0.slice/crio-2991a63c1dd752c979e8a43188d90ab361093396b6468e0b4aa566fd072fcb35\": RecentStats: unable to find data in memory cache]" Dec 13 03:13:44 crc kubenswrapper[5070]: I1213 03:13:44.781122 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rx625" event={"ID":"9e4db140-cd43-41ac-a1d7-1913a66ba814","Type":"ContainerStarted","Data":"70505c61ef9186ee3955480b750c15bde15e44a281eaaebf9afe00fbc281bf84"} Dec 13 03:13:44 crc kubenswrapper[5070]: I1213 03:13:44.784353 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-v8pzk" event={"ID":"665082fa-79a0-4ddb-83f8-0f45ad96de11","Type":"ContainerStarted","Data":"8b59e7adbbf1e9132e85eefbbf38a0b532768ef22264d4df90c084374293ac78"} Dec 13 03:13:44 crc kubenswrapper[5070]: I1213 03:13:44.787735 5070 generic.go:334] "Generic (PLEG): container finished" podID="9fd8bef8-6268-421f-86dd-03a873a347ac" containerID="d8ededc24f04f808824250bfbf83941164581455568526a2ce9625267273b050" exitCode=0 Dec 13 03:13:44 crc kubenswrapper[5070]: I1213 03:13:44.787778 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wvqnd" event={"ID":"9fd8bef8-6268-421f-86dd-03a873a347ac","Type":"ContainerDied","Data":"d8ededc24f04f808824250bfbf83941164581455568526a2ce9625267273b050"} Dec 13 03:13:44 crc kubenswrapper[5070]: I1213 03:13:44.803071 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-rx625" podStartSLOduration=4.404614596 podStartE2EDuration="1m2.803053784s" podCreationTimestamp="2025-12-13 03:12:42 +0000 UTC" firstStartedPulling="2025-12-13 03:12:46.028678934 +0000 UTC m=+58.264522480" lastFinishedPulling="2025-12-13 03:13:44.427118112 +0000 UTC m=+116.662961668" observedRunningTime="2025-12-13 03:13:44.7983789 +0000 UTC m=+117.034222456" 
watchObservedRunningTime="2025-12-13 03:13:44.803053784 +0000 UTC m=+117.038897330" Dec 13 03:13:44 crc kubenswrapper[5070]: I1213 03:13:44.834006 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-v8pzk" podStartSLOduration=4.225340743 podStartE2EDuration="1m1.833988795s" podCreationTimestamp="2025-12-13 03:12:43 +0000 UTC" firstStartedPulling="2025-12-13 03:12:46.032626149 +0000 UTC m=+58.268469695" lastFinishedPulling="2025-12-13 03:13:43.641274191 +0000 UTC m=+115.877117747" observedRunningTime="2025-12-13 03:13:44.833549413 +0000 UTC m=+117.069392959" watchObservedRunningTime="2025-12-13 03:13:44.833988795 +0000 UTC m=+117.069832341" Dec 13 03:13:45 crc kubenswrapper[5070]: I1213 03:13:45.235095 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-h5qcs" Dec 13 03:13:45 crc kubenswrapper[5070]: I1213 03:13:45.235141 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-h5qcs" Dec 13 03:13:45 crc kubenswrapper[5070]: I1213 03:13:45.284534 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-h5qcs" Dec 13 03:13:45 crc kubenswrapper[5070]: I1213 03:13:45.793896 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wvqnd" event={"ID":"9fd8bef8-6268-421f-86dd-03a873a347ac","Type":"ContainerStarted","Data":"da02fb1386bd6274500a32baea20786a2ccbe8c04929c4d861efce12ddfa1fdb"} Dec 13 03:13:46 crc kubenswrapper[5070]: I1213 03:13:46.476142 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-q5mgn" Dec 13 03:13:46 crc kubenswrapper[5070]: I1213 03:13:46.476412 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-q5mgn" Dec 13 03:13:46 crc kubenswrapper[5070]: I1213 03:13:46.521705 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-q5mgn" Dec 13 03:13:46 crc kubenswrapper[5070]: I1213 03:13:46.538796 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-wvqnd" podStartSLOduration=3.399187576 podStartE2EDuration="1m1.538777109s" podCreationTimestamp="2025-12-13 03:12:45 +0000 UTC" firstStartedPulling="2025-12-13 03:12:47.060220824 +0000 UTC m=+59.296064371" lastFinishedPulling="2025-12-13 03:13:45.199810358 +0000 UTC m=+117.435653904" observedRunningTime="2025-12-13 03:13:45.810266222 +0000 UTC m=+118.046109768" watchObservedRunningTime="2025-12-13 03:13:46.538777109 +0000 UTC m=+118.774620655" Dec 13 03:13:46 crc kubenswrapper[5070]: I1213 03:13:46.832611 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-q5mgn" Dec 13 03:13:46 crc kubenswrapper[5070]: I1213 03:13:46.909115 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-pd84r" Dec 13 03:13:46 crc kubenswrapper[5070]: I1213 03:13:46.962221 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-pd84r" Dec 13 03:13:49 crc kubenswrapper[5070]: I1213 03:13:49.890527 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-pd84r"] Dec 13 03:13:49 crc 
kubenswrapper[5070]: I1213 03:13:49.891122 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-pd84r" podUID="9748a82a-6eee-4d58-b638-9def1ee4bb38" containerName="registry-server" containerID="cri-o://09dc471285e913ad3dac0aa48f68bec5582a2013f464d441d43453c0eb9b035f" gracePeriod=2 Dec 13 03:13:51 crc kubenswrapper[5070]: I1213 03:13:51.826934 5070 generic.go:334] "Generic (PLEG): container finished" podID="9748a82a-6eee-4d58-b638-9def1ee4bb38" containerID="09dc471285e913ad3dac0aa48f68bec5582a2013f464d441d43453c0eb9b035f" exitCode=0 Dec 13 03:13:51 crc kubenswrapper[5070]: I1213 03:13:51.827015 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pd84r" event={"ID":"9748a82a-6eee-4d58-b638-9def1ee4bb38","Type":"ContainerDied","Data":"09dc471285e913ad3dac0aa48f68bec5582a2013f464d441d43453c0eb9b035f"} Dec 13 03:13:53 crc kubenswrapper[5070]: I1213 03:13:53.275205 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-rx625" Dec 13 03:13:53 crc kubenswrapper[5070]: I1213 03:13:53.276923 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-rx625" Dec 13 03:13:53 crc kubenswrapper[5070]: I1213 03:13:53.325022 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-rx625" Dec 13 03:13:53 crc kubenswrapper[5070]: I1213 03:13:53.685546 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-v8pzk" Dec 13 03:13:53 crc kubenswrapper[5070]: I1213 03:13:53.685595 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-v8pzk" Dec 13 03:13:53 crc kubenswrapper[5070]: I1213 03:13:53.738179 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-v8pzk" Dec 13 03:13:53 crc kubenswrapper[5070]: I1213 03:13:53.879262 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-v8pzk" Dec 13 03:13:53 crc kubenswrapper[5070]: I1213 03:13:53.879420 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-rx625" Dec 13 03:13:54 crc kubenswrapper[5070]: I1213 03:13:54.289573 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-v8pzk"] Dec 13 03:13:55 crc kubenswrapper[5070]: I1213 03:13:55.273864 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-h5qcs" Dec 13 03:13:55 crc kubenswrapper[5070]: I1213 03:13:55.668419 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-wvqnd" Dec 13 03:13:55 crc kubenswrapper[5070]: I1213 03:13:55.668479 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-wvqnd" Dec 13 03:13:55 crc kubenswrapper[5070]: I1213 03:13:55.715095 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-wvqnd" Dec 13 03:13:55 crc kubenswrapper[5070]: I1213 03:13:55.853084 5070 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openshift-marketplace/community-operators-v8pzk" podUID="665082fa-79a0-4ddb-83f8-0f45ad96de11" containerName="registry-server" containerID="cri-o://8b59e7adbbf1e9132e85eefbbf38a0b532768ef22264d4df90c084374293ac78" gracePeriod=2 Dec 13 03:13:55 crc kubenswrapper[5070]: I1213 03:13:55.892764 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-wvqnd" Dec 13 03:13:56 crc kubenswrapper[5070]: E1213 03:13:56.871970 5070 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 09dc471285e913ad3dac0aa48f68bec5582a2013f464d441d43453c0eb9b035f is running failed: container process not found" containerID="09dc471285e913ad3dac0aa48f68bec5582a2013f464d441d43453c0eb9b035f" cmd=["grpc_health_probe","-addr=:50051"] Dec 13 03:13:56 crc kubenswrapper[5070]: E1213 03:13:56.872580 5070 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 09dc471285e913ad3dac0aa48f68bec5582a2013f464d441d43453c0eb9b035f is running failed: container process not found" containerID="09dc471285e913ad3dac0aa48f68bec5582a2013f464d441d43453c0eb9b035f" cmd=["grpc_health_probe","-addr=:50051"] Dec 13 03:13:56 crc kubenswrapper[5070]: E1213 03:13:56.872936 5070 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 09dc471285e913ad3dac0aa48f68bec5582a2013f464d441d43453c0eb9b035f is running failed: container process not found" containerID="09dc471285e913ad3dac0aa48f68bec5582a2013f464d441d43453c0eb9b035f" cmd=["grpc_health_probe","-addr=:50051"] Dec 13 03:13:56 crc kubenswrapper[5070]: E1213 03:13:56.872967 5070 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 09dc471285e913ad3dac0aa48f68bec5582a2013f464d441d43453c0eb9b035f is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/redhat-operators-pd84r" podUID="9748a82a-6eee-4d58-b638-9def1ee4bb38" containerName="registry-server" Dec 13 03:13:57 crc kubenswrapper[5070]: I1213 03:13:57.704561 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-wvqnd"] Dec 13 03:13:57 crc kubenswrapper[5070]: I1213 03:13:57.861545 5070 generic.go:334] "Generic (PLEG): container finished" podID="665082fa-79a0-4ddb-83f8-0f45ad96de11" containerID="8b59e7adbbf1e9132e85eefbbf38a0b532768ef22264d4df90c084374293ac78" exitCode=0 Dec 13 03:13:57 crc kubenswrapper[5070]: I1213 03:13:57.861617 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-v8pzk" event={"ID":"665082fa-79a0-4ddb-83f8-0f45ad96de11","Type":"ContainerDied","Data":"8b59e7adbbf1e9132e85eefbbf38a0b532768ef22264d4df90c084374293ac78"} Dec 13 03:13:57 crc kubenswrapper[5070]: I1213 03:13:57.861784 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-wvqnd" podUID="9fd8bef8-6268-421f-86dd-03a873a347ac" containerName="registry-server" containerID="cri-o://da02fb1386bd6274500a32baea20786a2ccbe8c04929c4d861efce12ddfa1fdb" gracePeriod=2 Dec 13 03:13:59 crc kubenswrapper[5070]: I1213 03:13:59.879032 5070 generic.go:334] "Generic (PLEG): container finished" podID="9fd8bef8-6268-421f-86dd-03a873a347ac" 
containerID="da02fb1386bd6274500a32baea20786a2ccbe8c04929c4d861efce12ddfa1fdb" exitCode=0 Dec 13 03:13:59 crc kubenswrapper[5070]: I1213 03:13:59.879419 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wvqnd" event={"ID":"9fd8bef8-6268-421f-86dd-03a873a347ac","Type":"ContainerDied","Data":"da02fb1386bd6274500a32baea20786a2ccbe8c04929c4d861efce12ddfa1fdb"} Dec 13 03:14:01 crc kubenswrapper[5070]: I1213 03:14:01.825800 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-pd84r" Dec 13 03:14:01 crc kubenswrapper[5070]: I1213 03:14:01.871038 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9748a82a-6eee-4d58-b638-9def1ee4bb38-utilities\") pod \"9748a82a-6eee-4d58-b638-9def1ee4bb38\" (UID: \"9748a82a-6eee-4d58-b638-9def1ee4bb38\") " Dec 13 03:14:01 crc kubenswrapper[5070]: I1213 03:14:01.871106 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sfsn6\" (UniqueName: \"kubernetes.io/projected/9748a82a-6eee-4d58-b638-9def1ee4bb38-kube-api-access-sfsn6\") pod \"9748a82a-6eee-4d58-b638-9def1ee4bb38\" (UID: \"9748a82a-6eee-4d58-b638-9def1ee4bb38\") " Dec 13 03:14:01 crc kubenswrapper[5070]: I1213 03:14:01.871164 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9748a82a-6eee-4d58-b638-9def1ee4bb38-catalog-content\") pod \"9748a82a-6eee-4d58-b638-9def1ee4bb38\" (UID: \"9748a82a-6eee-4d58-b638-9def1ee4bb38\") " Dec 13 03:14:01 crc kubenswrapper[5070]: I1213 03:14:01.872292 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9748a82a-6eee-4d58-b638-9def1ee4bb38-utilities" (OuterVolumeSpecName: "utilities") pod "9748a82a-6eee-4d58-b638-9def1ee4bb38" (UID: "9748a82a-6eee-4d58-b638-9def1ee4bb38"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 03:14:01 crc kubenswrapper[5070]: I1213 03:14:01.880292 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9748a82a-6eee-4d58-b638-9def1ee4bb38-kube-api-access-sfsn6" (OuterVolumeSpecName: "kube-api-access-sfsn6") pod "9748a82a-6eee-4d58-b638-9def1ee4bb38" (UID: "9748a82a-6eee-4d58-b638-9def1ee4bb38"). InnerVolumeSpecName "kube-api-access-sfsn6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:14:01 crc kubenswrapper[5070]: I1213 03:14:01.893743 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pd84r" event={"ID":"9748a82a-6eee-4d58-b638-9def1ee4bb38","Type":"ContainerDied","Data":"b83857770be10a01d83f60335a98a56d0e1fe94c2e17dcd14384c9c96835e9fb"} Dec 13 03:14:01 crc kubenswrapper[5070]: I1213 03:14:01.893800 5070 scope.go:117] "RemoveContainer" containerID="09dc471285e913ad3dac0aa48f68bec5582a2013f464d441d43453c0eb9b035f" Dec 13 03:14:01 crc kubenswrapper[5070]: I1213 03:14:01.893930 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-pd84r" Dec 13 03:14:01 crc kubenswrapper[5070]: I1213 03:14:01.972851 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sfsn6\" (UniqueName: \"kubernetes.io/projected/9748a82a-6eee-4d58-b638-9def1ee4bb38-kube-api-access-sfsn6\") on node \"crc\" DevicePath \"\"" Dec 13 03:14:01 crc kubenswrapper[5070]: I1213 03:14:01.972901 5070 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9748a82a-6eee-4d58-b638-9def1ee4bb38-utilities\") on node \"crc\" DevicePath \"\"" Dec 13 03:14:01 crc kubenswrapper[5070]: I1213 03:14:01.989717 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9748a82a-6eee-4d58-b638-9def1ee4bb38-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9748a82a-6eee-4d58-b638-9def1ee4bb38" (UID: "9748a82a-6eee-4d58-b638-9def1ee4bb38"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 03:14:02 crc kubenswrapper[5070]: I1213 03:14:02.074186 5070 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9748a82a-6eee-4d58-b638-9def1ee4bb38-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 13 03:14:02 crc kubenswrapper[5070]: I1213 03:14:02.212238 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-pd84r"] Dec 13 03:14:02 crc kubenswrapper[5070]: I1213 03:14:02.214620 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-pd84r"] Dec 13 03:14:03 crc kubenswrapper[5070]: I1213 03:14:03.153569 5070 scope.go:117] "RemoveContainer" containerID="c7cf07eb71b9ff54e7fc291a00d3d75ca7b0cf1536d228fc85908d4bc68a025a" Dec 13 03:14:03 crc kubenswrapper[5070]: I1213 03:14:03.214680 5070 scope.go:117] "RemoveContainer" containerID="a0bc254b578967186a1e1447d3a3b58f98033a2c2d0b1bc3da80b406206907b2" Dec 13 03:14:03 crc kubenswrapper[5070]: I1213 03:14:03.258941 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wvqnd" Dec 13 03:14:03 crc kubenswrapper[5070]: I1213 03:14:03.288874 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g8s4b\" (UniqueName: \"kubernetes.io/projected/9fd8bef8-6268-421f-86dd-03a873a347ac-kube-api-access-g8s4b\") pod \"9fd8bef8-6268-421f-86dd-03a873a347ac\" (UID: \"9fd8bef8-6268-421f-86dd-03a873a347ac\") " Dec 13 03:14:03 crc kubenswrapper[5070]: I1213 03:14:03.288942 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9fd8bef8-6268-421f-86dd-03a873a347ac-utilities\") pod \"9fd8bef8-6268-421f-86dd-03a873a347ac\" (UID: \"9fd8bef8-6268-421f-86dd-03a873a347ac\") " Dec 13 03:14:03 crc kubenswrapper[5070]: I1213 03:14:03.288990 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9fd8bef8-6268-421f-86dd-03a873a347ac-catalog-content\") pod \"9fd8bef8-6268-421f-86dd-03a873a347ac\" (UID: \"9fd8bef8-6268-421f-86dd-03a873a347ac\") " Dec 13 03:14:03 crc kubenswrapper[5070]: I1213 03:14:03.290672 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9fd8bef8-6268-421f-86dd-03a873a347ac-utilities" (OuterVolumeSpecName: "utilities") pod "9fd8bef8-6268-421f-86dd-03a873a347ac" (UID: "9fd8bef8-6268-421f-86dd-03a873a347ac"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 03:14:03 crc kubenswrapper[5070]: I1213 03:14:03.304221 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9fd8bef8-6268-421f-86dd-03a873a347ac-kube-api-access-g8s4b" (OuterVolumeSpecName: "kube-api-access-g8s4b") pod "9fd8bef8-6268-421f-86dd-03a873a347ac" (UID: "9fd8bef8-6268-421f-86dd-03a873a347ac"). InnerVolumeSpecName "kube-api-access-g8s4b". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:14:03 crc kubenswrapper[5070]: I1213 03:14:03.322765 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9fd8bef8-6268-421f-86dd-03a873a347ac-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9fd8bef8-6268-421f-86dd-03a873a347ac" (UID: "9fd8bef8-6268-421f-86dd-03a873a347ac"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 03:14:03 crc kubenswrapper[5070]: I1213 03:14:03.350199 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-v8pzk" Dec 13 03:14:03 crc kubenswrapper[5070]: I1213 03:14:03.390506 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/665082fa-79a0-4ddb-83f8-0f45ad96de11-utilities\") pod \"665082fa-79a0-4ddb-83f8-0f45ad96de11\" (UID: \"665082fa-79a0-4ddb-83f8-0f45ad96de11\") " Dec 13 03:14:03 crc kubenswrapper[5070]: I1213 03:14:03.390614 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/665082fa-79a0-4ddb-83f8-0f45ad96de11-catalog-content\") pod \"665082fa-79a0-4ddb-83f8-0f45ad96de11\" (UID: \"665082fa-79a0-4ddb-83f8-0f45ad96de11\") " Dec 13 03:14:03 crc kubenswrapper[5070]: I1213 03:14:03.390637 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rljcj\" (UniqueName: \"kubernetes.io/projected/665082fa-79a0-4ddb-83f8-0f45ad96de11-kube-api-access-rljcj\") pod \"665082fa-79a0-4ddb-83f8-0f45ad96de11\" (UID: \"665082fa-79a0-4ddb-83f8-0f45ad96de11\") " Dec 13 03:14:03 crc kubenswrapper[5070]: I1213 03:14:03.391471 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/665082fa-79a0-4ddb-83f8-0f45ad96de11-utilities" (OuterVolumeSpecName: "utilities") pod "665082fa-79a0-4ddb-83f8-0f45ad96de11" (UID: "665082fa-79a0-4ddb-83f8-0f45ad96de11"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 03:14:03 crc kubenswrapper[5070]: I1213 03:14:03.393021 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/665082fa-79a0-4ddb-83f8-0f45ad96de11-kube-api-access-rljcj" (OuterVolumeSpecName: "kube-api-access-rljcj") pod "665082fa-79a0-4ddb-83f8-0f45ad96de11" (UID: "665082fa-79a0-4ddb-83f8-0f45ad96de11"). InnerVolumeSpecName "kube-api-access-rljcj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:14:03 crc kubenswrapper[5070]: I1213 03:14:03.396855 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g8s4b\" (UniqueName: \"kubernetes.io/projected/9fd8bef8-6268-421f-86dd-03a873a347ac-kube-api-access-g8s4b\") on node \"crc\" DevicePath \"\"" Dec 13 03:14:03 crc kubenswrapper[5070]: I1213 03:14:03.396979 5070 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/665082fa-79a0-4ddb-83f8-0f45ad96de11-utilities\") on node \"crc\" DevicePath \"\"" Dec 13 03:14:03 crc kubenswrapper[5070]: I1213 03:14:03.396999 5070 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9fd8bef8-6268-421f-86dd-03a873a347ac-utilities\") on node \"crc\" DevicePath \"\"" Dec 13 03:14:03 crc kubenswrapper[5070]: I1213 03:14:03.397010 5070 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9fd8bef8-6268-421f-86dd-03a873a347ac-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 13 03:14:03 crc kubenswrapper[5070]: I1213 03:14:03.397020 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rljcj\" (UniqueName: \"kubernetes.io/projected/665082fa-79a0-4ddb-83f8-0f45ad96de11-kube-api-access-rljcj\") on node \"crc\" DevicePath \"\"" Dec 13 03:14:03 crc kubenswrapper[5070]: I1213 03:14:03.440611 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/665082fa-79a0-4ddb-83f8-0f45ad96de11-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "665082fa-79a0-4ddb-83f8-0f45ad96de11" (UID: "665082fa-79a0-4ddb-83f8-0f45ad96de11"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 03:14:03 crc kubenswrapper[5070]: I1213 03:14:03.498237 5070 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/665082fa-79a0-4ddb-83f8-0f45ad96de11-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 13 03:14:03 crc kubenswrapper[5070]: I1213 03:14:03.907003 5070 generic.go:334] "Generic (PLEG): container finished" podID="2782a41c-ce36-4c0d-89c1-27b5e12e9b00" containerID="fe97b6f2ed3d3fd757c8f94d54381670e85de662fcd539e9a0f1c26ad36b5668" exitCode=0 Dec 13 03:14:03 crc kubenswrapper[5070]: I1213 03:14:03.907116 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lfs78" event={"ID":"2782a41c-ce36-4c0d-89c1-27b5e12e9b00","Type":"ContainerDied","Data":"fe97b6f2ed3d3fd757c8f94d54381670e85de662fcd539e9a0f1c26ad36b5668"} Dec 13 03:14:03 crc kubenswrapper[5070]: I1213 03:14:03.911902 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-j9rhx" event={"ID":"b3c6dca7-36c0-4fbc-90b0-a4f91bd588f3","Type":"ContainerStarted","Data":"692f16c72ff955c4d9b1bf9a57be181a758db4674117015ea970bcb633556b8e"} Dec 13 03:14:03 crc kubenswrapper[5070]: I1213 03:14:03.914838 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-v8pzk" event={"ID":"665082fa-79a0-4ddb-83f8-0f45ad96de11","Type":"ContainerDied","Data":"6ec479fd8968db65f340b97d39704962d4aa1069cca4d0b041984eca85fe189b"} Dec 13 03:14:03 crc kubenswrapper[5070]: I1213 03:14:03.914955 5070 scope.go:117] "RemoveContainer" containerID="8b59e7adbbf1e9132e85eefbbf38a0b532768ef22264d4df90c084374293ac78" Dec 13 03:14:03 crc kubenswrapper[5070]: I1213 03:14:03.915098 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-v8pzk" Dec 13 03:14:03 crc kubenswrapper[5070]: I1213 03:14:03.921707 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wvqnd" event={"ID":"9fd8bef8-6268-421f-86dd-03a873a347ac","Type":"ContainerDied","Data":"5f5e1a962409c5412481aabfbbcd800acbad711d8c4699e474629f20578e1c13"} Dec 13 03:14:03 crc kubenswrapper[5070]: I1213 03:14:03.921758 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wvqnd" Dec 13 03:14:03 crc kubenswrapper[5070]: I1213 03:14:03.963131 5070 scope.go:117] "RemoveContainer" containerID="51763391905d2db445461a85f8e81929f3ae2f63a0afbe0d79c4fc7bd9dfaee2" Dec 13 03:14:03 crc kubenswrapper[5070]: I1213 03:14:03.965209 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-v8pzk"] Dec 13 03:14:03 crc kubenswrapper[5070]: I1213 03:14:03.980165 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-v8pzk"] Dec 13 03:14:03 crc kubenswrapper[5070]: I1213 03:14:03.986877 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-wvqnd"] Dec 13 03:14:03 crc kubenswrapper[5070]: I1213 03:14:03.990173 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-wvqnd"] Dec 13 03:14:04 crc kubenswrapper[5070]: I1213 03:14:04.003202 5070 scope.go:117] "RemoveContainer" containerID="ebb4455c6948c1ab50193164599109b00da3724dc443422688906b4786f13e02" Dec 13 03:14:04 crc kubenswrapper[5070]: I1213 03:14:04.017938 5070 scope.go:117] "RemoveContainer" containerID="da02fb1386bd6274500a32baea20786a2ccbe8c04929c4d861efce12ddfa1fdb" Dec 13 03:14:04 crc kubenswrapper[5070]: I1213 03:14:04.028312 5070 scope.go:117] "RemoveContainer" containerID="d8ededc24f04f808824250bfbf83941164581455568526a2ce9625267273b050" Dec 13 03:14:04 crc kubenswrapper[5070]: I1213 03:14:04.038242 5070 scope.go:117] "RemoveContainer" containerID="9f0c23ae0bd181978b27c50199bd9280a8d04966134b1371f54b4a7a1439fd2e" Dec 13 03:14:04 crc kubenswrapper[5070]: I1213 03:14:04.177142 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="665082fa-79a0-4ddb-83f8-0f45ad96de11" path="/var/lib/kubelet/pods/665082fa-79a0-4ddb-83f8-0f45ad96de11/volumes" Dec 13 03:14:04 crc kubenswrapper[5070]: I1213 03:14:04.177862 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9748a82a-6eee-4d58-b638-9def1ee4bb38" path="/var/lib/kubelet/pods/9748a82a-6eee-4d58-b638-9def1ee4bb38/volumes" Dec 13 03:14:04 crc kubenswrapper[5070]: I1213 03:14:04.178576 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9fd8bef8-6268-421f-86dd-03a873a347ac" path="/var/lib/kubelet/pods/9fd8bef8-6268-421f-86dd-03a873a347ac/volumes" Dec 13 03:14:04 crc kubenswrapper[5070]: I1213 03:14:04.928640 5070 generic.go:334] "Generic (PLEG): container finished" podID="b3c6dca7-36c0-4fbc-90b0-a4f91bd588f3" containerID="692f16c72ff955c4d9b1bf9a57be181a758db4674117015ea970bcb633556b8e" exitCode=0 Dec 13 03:14:04 crc kubenswrapper[5070]: I1213 03:14:04.928723 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-j9rhx" event={"ID":"b3c6dca7-36c0-4fbc-90b0-a4f91bd588f3","Type":"ContainerDied","Data":"692f16c72ff955c4d9b1bf9a57be181a758db4674117015ea970bcb633556b8e"} Dec 13 03:14:05 crc kubenswrapper[5070]: I1213 03:14:05.939788 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lfs78" event={"ID":"2782a41c-ce36-4c0d-89c1-27b5e12e9b00","Type":"ContainerStarted","Data":"8cfe61d6e66a78b04addb35438357158f34a2d1fc5f3803f7d398cc303ed9906"} Dec 13 03:14:05 crc kubenswrapper[5070]: I1213 03:14:05.942363 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-j9rhx" 
event={"ID":"b3c6dca7-36c0-4fbc-90b0-a4f91bd588f3","Type":"ContainerStarted","Data":"e3f7a276cafef522541a276a57264707493cd7afd7d3d8ffda3193c324cb8ebc"} Dec 13 03:14:05 crc kubenswrapper[5070]: I1213 03:14:05.963298 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-lfs78" podStartSLOduration=3.400799591 podStartE2EDuration="1m22.963277548s" podCreationTimestamp="2025-12-13 03:12:43 +0000 UTC" firstStartedPulling="2025-12-13 03:12:46.018278401 +0000 UTC m=+58.254121947" lastFinishedPulling="2025-12-13 03:14:05.580756358 +0000 UTC m=+137.816599904" observedRunningTime="2025-12-13 03:14:05.960794277 +0000 UTC m=+138.196637823" watchObservedRunningTime="2025-12-13 03:14:05.963277548 +0000 UTC m=+138.199121094" Dec 13 03:14:05 crc kubenswrapper[5070]: I1213 03:14:05.981203 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-j9rhx" podStartSLOduration=3.544902319 podStartE2EDuration="1m22.981181178s" podCreationTimestamp="2025-12-13 03:12:43 +0000 UTC" firstStartedPulling="2025-12-13 03:12:46.005991836 +0000 UTC m=+58.241835382" lastFinishedPulling="2025-12-13 03:14:05.442270695 +0000 UTC m=+137.678114241" observedRunningTime="2025-12-13 03:14:05.975294639 +0000 UTC m=+138.211138195" watchObservedRunningTime="2025-12-13 03:14:05.981181178 +0000 UTC m=+138.217024724" Dec 13 03:14:08 crc kubenswrapper[5070]: I1213 03:14:08.915672 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-dwkv2"] Dec 13 03:14:12 crc kubenswrapper[5070]: I1213 03:14:12.203718 5070 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 13 03:14:12 crc kubenswrapper[5070]: E1213 03:14:12.204176 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="665082fa-79a0-4ddb-83f8-0f45ad96de11" containerName="registry-server" Dec 13 03:14:12 crc kubenswrapper[5070]: I1213 03:14:12.204188 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="665082fa-79a0-4ddb-83f8-0f45ad96de11" containerName="registry-server" Dec 13 03:14:12 crc kubenswrapper[5070]: E1213 03:14:12.204202 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9748a82a-6eee-4d58-b638-9def1ee4bb38" containerName="extract-utilities" Dec 13 03:14:12 crc kubenswrapper[5070]: I1213 03:14:12.204209 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="9748a82a-6eee-4d58-b638-9def1ee4bb38" containerName="extract-utilities" Dec 13 03:14:12 crc kubenswrapper[5070]: E1213 03:14:12.204222 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="612826dc-fa90-4c14-9fb8-3ea90be2fdb3" containerName="pruner" Dec 13 03:14:12 crc kubenswrapper[5070]: I1213 03:14:12.204229 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="612826dc-fa90-4c14-9fb8-3ea90be2fdb3" containerName="pruner" Dec 13 03:14:12 crc kubenswrapper[5070]: E1213 03:14:12.204238 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9748a82a-6eee-4d58-b638-9def1ee4bb38" containerName="registry-server" Dec 13 03:14:12 crc kubenswrapper[5070]: I1213 03:14:12.204246 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="9748a82a-6eee-4d58-b638-9def1ee4bb38" containerName="registry-server" Dec 13 03:14:12 crc kubenswrapper[5070]: E1213 03:14:12.204254 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9fd8bef8-6268-421f-86dd-03a873a347ac" 
containerName="extract-utilities" Dec 13 03:14:12 crc kubenswrapper[5070]: I1213 03:14:12.204262 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="9fd8bef8-6268-421f-86dd-03a873a347ac" containerName="extract-utilities" Dec 13 03:14:12 crc kubenswrapper[5070]: E1213 03:14:12.204271 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9fd8bef8-6268-421f-86dd-03a873a347ac" containerName="registry-server" Dec 13 03:14:12 crc kubenswrapper[5070]: I1213 03:14:12.204278 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="9fd8bef8-6268-421f-86dd-03a873a347ac" containerName="registry-server" Dec 13 03:14:12 crc kubenswrapper[5070]: E1213 03:14:12.204286 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9748a82a-6eee-4d58-b638-9def1ee4bb38" containerName="extract-content" Dec 13 03:14:12 crc kubenswrapper[5070]: I1213 03:14:12.204291 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="9748a82a-6eee-4d58-b638-9def1ee4bb38" containerName="extract-content" Dec 13 03:14:12 crc kubenswrapper[5070]: E1213 03:14:12.204306 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="665082fa-79a0-4ddb-83f8-0f45ad96de11" containerName="extract-utilities" Dec 13 03:14:12 crc kubenswrapper[5070]: I1213 03:14:12.204311 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="665082fa-79a0-4ddb-83f8-0f45ad96de11" containerName="extract-utilities" Dec 13 03:14:12 crc kubenswrapper[5070]: E1213 03:14:12.204319 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9fd8bef8-6268-421f-86dd-03a873a347ac" containerName="extract-content" Dec 13 03:14:12 crc kubenswrapper[5070]: I1213 03:14:12.204325 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="9fd8bef8-6268-421f-86dd-03a873a347ac" containerName="extract-content" Dec 13 03:14:12 crc kubenswrapper[5070]: E1213 03:14:12.204333 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="665082fa-79a0-4ddb-83f8-0f45ad96de11" containerName="extract-content" Dec 13 03:14:12 crc kubenswrapper[5070]: I1213 03:14:12.204339 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="665082fa-79a0-4ddb-83f8-0f45ad96de11" containerName="extract-content" Dec 13 03:14:12 crc kubenswrapper[5070]: I1213 03:14:12.204482 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="9fd8bef8-6268-421f-86dd-03a873a347ac" containerName="registry-server" Dec 13 03:14:12 crc kubenswrapper[5070]: I1213 03:14:12.204502 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="9748a82a-6eee-4d58-b638-9def1ee4bb38" containerName="registry-server" Dec 13 03:14:12 crc kubenswrapper[5070]: I1213 03:14:12.204510 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="612826dc-fa90-4c14-9fb8-3ea90be2fdb3" containerName="pruner" Dec 13 03:14:12 crc kubenswrapper[5070]: I1213 03:14:12.204523 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="665082fa-79a0-4ddb-83f8-0f45ad96de11" containerName="registry-server" Dec 13 03:14:12 crc kubenswrapper[5070]: I1213 03:14:12.204866 5070 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 13 03:14:12 crc kubenswrapper[5070]: I1213 03:14:12.205032 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 13 03:14:12 crc kubenswrapper[5070]: I1213 03:14:12.205114 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" containerID="cri-o://7c109d3244fa0324a8388b5f433ae9d5eb4a8519b6abc901f9fb91284885b605" gracePeriod=15 Dec 13 03:14:12 crc kubenswrapper[5070]: I1213 03:14:12.205157 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" containerID="cri-o://4317a72f422d158e2d22658fd3601f8f2da155e957551144bc153bcaca1f4673" gracePeriod=15 Dec 13 03:14:12 crc kubenswrapper[5070]: I1213 03:14:12.205171 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" containerID="cri-o://7dfbc973291e3f829acc09c1dc5e465454ba0dc5ab088fc08211a86f51880fba" gracePeriod=15 Dec 13 03:14:12 crc kubenswrapper[5070]: I1213 03:14:12.205224 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" containerID="cri-o://3c7166d048c7f6f352babbbbff3d8526971042faedd18a600098c5a1edc72db4" gracePeriod=15 Dec 13 03:14:12 crc kubenswrapper[5070]: I1213 03:14:12.205245 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" containerID="cri-o://57c12f12bfa97cb3476d9af993bc99bdd178d8d7a2f3d92bac86c096d2729c8f" gracePeriod=15 Dec 13 03:14:12 crc kubenswrapper[5070]: I1213 03:14:12.206130 5070 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 13 03:14:12 crc kubenswrapper[5070]: E1213 03:14:12.206294 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 13 03:14:12 crc kubenswrapper[5070]: I1213 03:14:12.206315 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 13 03:14:12 crc kubenswrapper[5070]: E1213 03:14:12.206324 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 13 03:14:12 crc kubenswrapper[5070]: I1213 03:14:12.206333 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 13 03:14:12 crc kubenswrapper[5070]: E1213 03:14:12.206346 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Dec 13 03:14:12 crc kubenswrapper[5070]: I1213 03:14:12.206353 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Dec 13 03:14:12 crc kubenswrapper[5070]: E1213 03:14:12.206364 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" 
containerName="kube-apiserver-insecure-readyz" Dec 13 03:14:12 crc kubenswrapper[5070]: I1213 03:14:12.206371 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 13 03:14:12 crc kubenswrapper[5070]: E1213 03:14:12.206380 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 13 03:14:12 crc kubenswrapper[5070]: I1213 03:14:12.206388 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 13 03:14:12 crc kubenswrapper[5070]: E1213 03:14:12.206396 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 13 03:14:12 crc kubenswrapper[5070]: I1213 03:14:12.206403 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 13 03:14:12 crc kubenswrapper[5070]: E1213 03:14:12.206414 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 13 03:14:12 crc kubenswrapper[5070]: I1213 03:14:12.206422 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 13 03:14:12 crc kubenswrapper[5070]: E1213 03:14:12.206434 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 13 03:14:12 crc kubenswrapper[5070]: I1213 03:14:12.206445 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 13 03:14:12 crc kubenswrapper[5070]: I1213 03:14:12.206569 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 13 03:14:12 crc kubenswrapper[5070]: I1213 03:14:12.206583 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 13 03:14:12 crc kubenswrapper[5070]: I1213 03:14:12.206591 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 13 03:14:12 crc kubenswrapper[5070]: I1213 03:14:12.206598 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 13 03:14:12 crc kubenswrapper[5070]: I1213 03:14:12.206607 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 13 03:14:12 crc kubenswrapper[5070]: I1213 03:14:12.206616 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 13 03:14:12 crc kubenswrapper[5070]: I1213 03:14:12.206804 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 13 03:14:12 crc kubenswrapper[5070]: I1213 03:14:12.245353 5070 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 13 03:14:12 crc kubenswrapper[5070]: I1213 03:14:12.315721 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 13 03:14:12 crc kubenswrapper[5070]: I1213 03:14:12.315766 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 13 03:14:12 crc kubenswrapper[5070]: I1213 03:14:12.315797 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 13 03:14:12 crc kubenswrapper[5070]: I1213 03:14:12.315818 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 13 03:14:12 crc kubenswrapper[5070]: I1213 03:14:12.315832 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 13 03:14:12 crc kubenswrapper[5070]: I1213 03:14:12.315858 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 13 03:14:12 crc kubenswrapper[5070]: I1213 03:14:12.315884 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 13 03:14:12 crc kubenswrapper[5070]: I1213 03:14:12.315904 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 13 03:14:12 crc kubenswrapper[5070]: I1213 03:14:12.416945 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-resource-dir\" (UniqueName: 
\"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 13 03:14:12 crc kubenswrapper[5070]: I1213 03:14:12.416993 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 13 03:14:12 crc kubenswrapper[5070]: I1213 03:14:12.417026 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 13 03:14:12 crc kubenswrapper[5070]: I1213 03:14:12.417044 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 13 03:14:12 crc kubenswrapper[5070]: I1213 03:14:12.417072 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 13 03:14:12 crc kubenswrapper[5070]: I1213 03:14:12.417088 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 13 03:14:12 crc kubenswrapper[5070]: I1213 03:14:12.417086 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 13 03:14:12 crc kubenswrapper[5070]: I1213 03:14:12.417106 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 13 03:14:12 crc kubenswrapper[5070]: I1213 03:14:12.417144 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 13 03:14:12 crc kubenswrapper[5070]: I1213 03:14:12.417151 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") 
pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 13 03:14:12 crc kubenswrapper[5070]: I1213 03:14:12.417156 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 13 03:14:12 crc kubenswrapper[5070]: I1213 03:14:12.417132 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 13 03:14:12 crc kubenswrapper[5070]: I1213 03:14:12.417173 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 13 03:14:12 crc kubenswrapper[5070]: I1213 03:14:12.417184 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 13 03:14:12 crc kubenswrapper[5070]: I1213 03:14:12.417314 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 13 03:14:12 crc kubenswrapper[5070]: I1213 03:14:12.417334 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 13 03:14:12 crc kubenswrapper[5070]: I1213 03:14:12.542603 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 13 03:14:12 crc kubenswrapper[5070]: W1213 03:14:12.588312 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf85e55b1a89d02b0cb034b1ea31ed45a.slice/crio-2a3584319bb2d112a071163eaa6700e0411a0833273d2e76c8f2261c2ec96ab0 WatchSource:0}: Error finding container 2a3584319bb2d112a071163eaa6700e0411a0833273d2e76c8f2261c2ec96ab0: Status 404 returned error can't find the container with id 2a3584319bb2d112a071163eaa6700e0411a0833273d2e76c8f2261c2ec96ab0 Dec 13 03:14:12 crc kubenswrapper[5070]: E1213 03:14:12.595301 5070 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.227:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.1880a7e439d79659 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-13 03:14:12.594046553 +0000 UTC m=+144.829890099,LastTimestamp:2025-12-13 03:14:12.594046553 +0000 UTC m=+144.829890099,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 13 03:14:12 crc kubenswrapper[5070]: I1213 03:14:12.990774 5070 generic.go:334] "Generic (PLEG): container finished" podID="6c339fa5-cbb7-4493-ad68-da5f1a046330" containerID="959c92940046c89f728cc9017efc8f50f3f93057dcfd5f6f481c039c589d477a" exitCode=0 Dec 13 03:14:12 crc kubenswrapper[5070]: I1213 03:14:12.990881 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"6c339fa5-cbb7-4493-ad68-da5f1a046330","Type":"ContainerDied","Data":"959c92940046c89f728cc9017efc8f50f3f93057dcfd5f6f481c039c589d477a"} Dec 13 03:14:12 crc kubenswrapper[5070]: I1213 03:14:12.991659 5070 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.227:6443: connect: connection refused" Dec 13 03:14:12 crc kubenswrapper[5070]: I1213 03:14:12.991931 5070 status_manager.go:851] "Failed to get status for pod" podUID="6c339fa5-cbb7-4493-ad68-da5f1a046330" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.227:6443: connect: connection refused" Dec 13 03:14:12 crc kubenswrapper[5070]: I1213 03:14:12.991935 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"c45b32a10a0c667ecc3fb1c2df88dce51460bc06b86c232efce02f89a2850437"} Dec 13 
03:14:12 crc kubenswrapper[5070]: I1213 03:14:12.992071 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"2a3584319bb2d112a071163eaa6700e0411a0833273d2e76c8f2261c2ec96ab0"} Dec 13 03:14:12 crc kubenswrapper[5070]: I1213 03:14:12.992243 5070 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.227:6443: connect: connection refused" Dec 13 03:14:12 crc kubenswrapper[5070]: I1213 03:14:12.992563 5070 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.227:6443: connect: connection refused" Dec 13 03:14:12 crc kubenswrapper[5070]: I1213 03:14:12.992829 5070 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.227:6443: connect: connection refused" Dec 13 03:14:12 crc kubenswrapper[5070]: I1213 03:14:12.993052 5070 status_manager.go:851] "Failed to get status for pod" podUID="6c339fa5-cbb7-4493-ad68-da5f1a046330" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.227:6443: connect: connection refused" Dec 13 03:14:12 crc kubenswrapper[5070]: I1213 03:14:12.994046 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Dec 13 03:14:12 crc kubenswrapper[5070]: I1213 03:14:12.995650 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 13 03:14:12 crc kubenswrapper[5070]: I1213 03:14:12.996281 5070 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="3c7166d048c7f6f352babbbbff3d8526971042faedd18a600098c5a1edc72db4" exitCode=0 Dec 13 03:14:12 crc kubenswrapper[5070]: I1213 03:14:12.996305 5070 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="4317a72f422d158e2d22658fd3601f8f2da155e957551144bc153bcaca1f4673" exitCode=0 Dec 13 03:14:12 crc kubenswrapper[5070]: I1213 03:14:12.996315 5070 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="7dfbc973291e3f829acc09c1dc5e465454ba0dc5ab088fc08211a86f51880fba" exitCode=0 Dec 13 03:14:12 crc kubenswrapper[5070]: I1213 03:14:12.996325 5070 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="57c12f12bfa97cb3476d9af993bc99bdd178d8d7a2f3d92bac86c096d2729c8f" exitCode=2 Dec 13 03:14:12 crc kubenswrapper[5070]: I1213 03:14:12.996365 5070 scope.go:117] "RemoveContainer" 
containerID="bee949adc33cab61eb1d70c6cecda6bbbf42616b431d2af90fcf71854407da41" Dec 13 03:14:13 crc kubenswrapper[5070]: I1213 03:14:13.516201 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-lfs78" Dec 13 03:14:13 crc kubenswrapper[5070]: I1213 03:14:13.516456 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-lfs78" Dec 13 03:14:13 crc kubenswrapper[5070]: I1213 03:14:13.550397 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-lfs78" Dec 13 03:14:13 crc kubenswrapper[5070]: I1213 03:14:13.551024 5070 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.227:6443: connect: connection refused" Dec 13 03:14:13 crc kubenswrapper[5070]: I1213 03:14:13.551558 5070 status_manager.go:851] "Failed to get status for pod" podUID="6c339fa5-cbb7-4493-ad68-da5f1a046330" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.227:6443: connect: connection refused" Dec 13 03:14:13 crc kubenswrapper[5070]: I1213 03:14:13.552027 5070 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.227:6443: connect: connection refused" Dec 13 03:14:13 crc kubenswrapper[5070]: I1213 03:14:13.552284 5070 status_manager.go:851] "Failed to get status for pod" podUID="2782a41c-ce36-4c0d-89c1-27b5e12e9b00" pod="openshift-marketplace/certified-operators-lfs78" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-lfs78\": dial tcp 38.102.83.227:6443: connect: connection refused" Dec 13 03:14:13 crc kubenswrapper[5070]: E1213 03:14:13.719972 5070 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.227:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.1880a7e439d79659 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-13 03:14:12.594046553 +0000 UTC m=+144.829890099,LastTimestamp:2025-12-13 03:14:12.594046553 +0000 UTC m=+144.829890099,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 13 03:14:13 crc kubenswrapper[5070]: I1213 03:14:13.888483 5070 kubelet.go:2542] "SyncLoop (probe)" 
probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-j9rhx" Dec 13 03:14:13 crc kubenswrapper[5070]: I1213 03:14:13.888536 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-j9rhx" Dec 13 03:14:13 crc kubenswrapper[5070]: I1213 03:14:13.925104 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-j9rhx" Dec 13 03:14:13 crc kubenswrapper[5070]: I1213 03:14:13.925701 5070 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.227:6443: connect: connection refused" Dec 13 03:14:13 crc kubenswrapper[5070]: I1213 03:14:13.926110 5070 status_manager.go:851] "Failed to get status for pod" podUID="6c339fa5-cbb7-4493-ad68-da5f1a046330" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.227:6443: connect: connection refused" Dec 13 03:14:13 crc kubenswrapper[5070]: I1213 03:14:13.926427 5070 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.227:6443: connect: connection refused" Dec 13 03:14:13 crc kubenswrapper[5070]: I1213 03:14:13.926689 5070 status_manager.go:851] "Failed to get status for pod" podUID="b3c6dca7-36c0-4fbc-90b0-a4f91bd588f3" pod="openshift-marketplace/certified-operators-j9rhx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-j9rhx\": dial tcp 38.102.83.227:6443: connect: connection refused" Dec 13 03:14:13 crc kubenswrapper[5070]: I1213 03:14:13.926929 5070 status_manager.go:851] "Failed to get status for pod" podUID="2782a41c-ce36-4c0d-89c1-27b5e12e9b00" pod="openshift-marketplace/certified-operators-lfs78" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-lfs78\": dial tcp 38.102.83.227:6443: connect: connection refused" Dec 13 03:14:14 crc kubenswrapper[5070]: I1213 03:14:14.004241 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 13 03:14:14 crc kubenswrapper[5070]: I1213 03:14:14.044352 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-j9rhx" Dec 13 03:14:14 crc kubenswrapper[5070]: I1213 03:14:14.045013 5070 status_manager.go:851] "Failed to get status for pod" podUID="b3c6dca7-36c0-4fbc-90b0-a4f91bd588f3" pod="openshift-marketplace/certified-operators-j9rhx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-j9rhx\": dial tcp 38.102.83.227:6443: connect: connection refused" Dec 13 03:14:14 crc kubenswrapper[5070]: I1213 03:14:14.045411 5070 status_manager.go:851] "Failed to get status for pod" podUID="2782a41c-ce36-4c0d-89c1-27b5e12e9b00" pod="openshift-marketplace/certified-operators-lfs78" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-lfs78\": dial tcp 38.102.83.227:6443: connect: connection refused" Dec 13 03:14:14 crc kubenswrapper[5070]: I1213 03:14:14.045715 5070 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.227:6443: connect: connection refused" Dec 13 03:14:14 crc kubenswrapper[5070]: I1213 03:14:14.045973 5070 status_manager.go:851] "Failed to get status for pod" podUID="6c339fa5-cbb7-4493-ad68-da5f1a046330" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.227:6443: connect: connection refused" Dec 13 03:14:14 crc kubenswrapper[5070]: I1213 03:14:14.046219 5070 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.227:6443: connect: connection refused" Dec 13 03:14:14 crc kubenswrapper[5070]: I1213 03:14:14.061103 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-lfs78" Dec 13 03:14:14 crc kubenswrapper[5070]: I1213 03:14:14.061683 5070 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.227:6443: connect: connection refused" Dec 13 03:14:14 crc kubenswrapper[5070]: I1213 03:14:14.061951 5070 status_manager.go:851] "Failed to get status for pod" podUID="b3c6dca7-36c0-4fbc-90b0-a4f91bd588f3" pod="openshift-marketplace/certified-operators-j9rhx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-j9rhx\": dial tcp 38.102.83.227:6443: connect: connection refused" Dec 13 03:14:14 crc kubenswrapper[5070]: I1213 03:14:14.062213 5070 status_manager.go:851] "Failed to get status for pod" podUID="2782a41c-ce36-4c0d-89c1-27b5e12e9b00" pod="openshift-marketplace/certified-operators-lfs78" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-lfs78\": dial tcp 38.102.83.227:6443: connect: connection refused" Dec 13 03:14:14 crc kubenswrapper[5070]: I1213 03:14:14.062495 5070 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.227:6443: connect: connection refused" Dec 13 03:14:14 crc kubenswrapper[5070]: I1213 03:14:14.062748 5070 status_manager.go:851] "Failed to get status for pod" podUID="6c339fa5-cbb7-4493-ad68-da5f1a046330" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.227:6443: connect: connection refused" Dec 13 03:14:14 crc 
kubenswrapper[5070]: E1213 03:14:14.223712 5070 desired_state_of_world_populator.go:312] "Error processing volume" err="error processing PVC openshift-image-registry/crc-image-registry-storage: failed to fetch PVC from API server: Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-image-registry/persistentvolumeclaims/crc-image-registry-storage\": dial tcp 38.102.83.227:6443: connect: connection refused" pod="openshift-image-registry/image-registry-697d97f7c8-xkp8n" volumeName="registry-storage" Dec 13 03:14:14 crc kubenswrapper[5070]: I1213 03:14:14.268123 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 13 03:14:14 crc kubenswrapper[5070]: I1213 03:14:14.268713 5070 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.227:6443: connect: connection refused" Dec 13 03:14:14 crc kubenswrapper[5070]: I1213 03:14:14.268905 5070 status_manager.go:851] "Failed to get status for pod" podUID="6c339fa5-cbb7-4493-ad68-da5f1a046330" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.227:6443: connect: connection refused" Dec 13 03:14:14 crc kubenswrapper[5070]: I1213 03:14:14.269209 5070 status_manager.go:851] "Failed to get status for pod" podUID="b3c6dca7-36c0-4fbc-90b0-a4f91bd588f3" pod="openshift-marketplace/certified-operators-j9rhx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-j9rhx\": dial tcp 38.102.83.227:6443: connect: connection refused" Dec 13 03:14:14 crc kubenswrapper[5070]: I1213 03:14:14.270856 5070 status_manager.go:851] "Failed to get status for pod" podUID="2782a41c-ce36-4c0d-89c1-27b5e12e9b00" pod="openshift-marketplace/certified-operators-lfs78" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-lfs78\": dial tcp 38.102.83.227:6443: connect: connection refused" Dec 13 03:14:14 crc kubenswrapper[5070]: I1213 03:14:14.339919 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/6c339fa5-cbb7-4493-ad68-da5f1a046330-kubelet-dir\") pod \"6c339fa5-cbb7-4493-ad68-da5f1a046330\" (UID: \"6c339fa5-cbb7-4493-ad68-da5f1a046330\") " Dec 13 03:14:14 crc kubenswrapper[5070]: I1213 03:14:14.339983 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/6c339fa5-cbb7-4493-ad68-da5f1a046330-var-lock\") pod \"6c339fa5-cbb7-4493-ad68-da5f1a046330\" (UID: \"6c339fa5-cbb7-4493-ad68-da5f1a046330\") " Dec 13 03:14:14 crc kubenswrapper[5070]: I1213 03:14:14.340010 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/6c339fa5-cbb7-4493-ad68-da5f1a046330-kube-api-access\") pod \"6c339fa5-cbb7-4493-ad68-da5f1a046330\" (UID: \"6c339fa5-cbb7-4493-ad68-da5f1a046330\") " Dec 13 03:14:14 crc kubenswrapper[5070]: I1213 03:14:14.340932 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/host-path/6c339fa5-cbb7-4493-ad68-da5f1a046330-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "6c339fa5-cbb7-4493-ad68-da5f1a046330" (UID: "6c339fa5-cbb7-4493-ad68-da5f1a046330"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 13 03:14:14 crc kubenswrapper[5070]: I1213 03:14:14.340978 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/6c339fa5-cbb7-4493-ad68-da5f1a046330-var-lock" (OuterVolumeSpecName: "var-lock") pod "6c339fa5-cbb7-4493-ad68-da5f1a046330" (UID: "6c339fa5-cbb7-4493-ad68-da5f1a046330"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 13 03:14:14 crc kubenswrapper[5070]: I1213 03:14:14.344910 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6c339fa5-cbb7-4493-ad68-da5f1a046330-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "6c339fa5-cbb7-4493-ad68-da5f1a046330" (UID: "6c339fa5-cbb7-4493-ad68-da5f1a046330"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:14:14 crc kubenswrapper[5070]: I1213 03:14:14.441198 5070 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/6c339fa5-cbb7-4493-ad68-da5f1a046330-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 13 03:14:14 crc kubenswrapper[5070]: I1213 03:14:14.441603 5070 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/6c339fa5-cbb7-4493-ad68-da5f1a046330-var-lock\") on node \"crc\" DevicePath \"\"" Dec 13 03:14:14 crc kubenswrapper[5070]: I1213 03:14:14.441618 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/6c339fa5-cbb7-4493-ad68-da5f1a046330-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 13 03:14:15 crc kubenswrapper[5070]: I1213 03:14:15.018862 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"6c339fa5-cbb7-4493-ad68-da5f1a046330","Type":"ContainerDied","Data":"3526207975bb2416de32e21c5bcb81c4d24b9a0989e8b7a460f24b253c9639d0"} Dec 13 03:14:15 crc kubenswrapper[5070]: I1213 03:14:15.018903 5070 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3526207975bb2416de32e21c5bcb81c4d24b9a0989e8b7a460f24b253c9639d0" Dec 13 03:14:15 crc kubenswrapper[5070]: I1213 03:14:15.018935 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 13 03:14:15 crc kubenswrapper[5070]: I1213 03:14:15.034351 5070 status_manager.go:851] "Failed to get status for pod" podUID="b3c6dca7-36c0-4fbc-90b0-a4f91bd588f3" pod="openshift-marketplace/certified-operators-j9rhx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-j9rhx\": dial tcp 38.102.83.227:6443: connect: connection refused" Dec 13 03:14:15 crc kubenswrapper[5070]: I1213 03:14:15.034680 5070 status_manager.go:851] "Failed to get status for pod" podUID="2782a41c-ce36-4c0d-89c1-27b5e12e9b00" pod="openshift-marketplace/certified-operators-lfs78" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-lfs78\": dial tcp 38.102.83.227:6443: connect: connection refused" Dec 13 03:14:15 crc kubenswrapper[5070]: I1213 03:14:15.034989 5070 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.227:6443: connect: connection refused" Dec 13 03:14:15 crc kubenswrapper[5070]: I1213 03:14:15.035293 5070 status_manager.go:851] "Failed to get status for pod" podUID="6c339fa5-cbb7-4493-ad68-da5f1a046330" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.227:6443: connect: connection refused" Dec 13 03:14:16 crc kubenswrapper[5070]: I1213 03:14:16.028225 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 13 03:14:16 crc kubenswrapper[5070]: I1213 03:14:16.029932 5070 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="7c109d3244fa0324a8388b5f433ae9d5eb4a8519b6abc901f9fb91284885b605" exitCode=0 Dec 13 03:14:16 crc kubenswrapper[5070]: I1213 03:14:16.194023 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 13 03:14:16 crc kubenswrapper[5070]: I1213 03:14:16.194774 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 13 03:14:16 crc kubenswrapper[5070]: I1213 03:14:16.195276 5070 status_manager.go:851] "Failed to get status for pod" podUID="b3c6dca7-36c0-4fbc-90b0-a4f91bd588f3" pod="openshift-marketplace/certified-operators-j9rhx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-j9rhx\": dial tcp 38.102.83.227:6443: connect: connection refused" Dec 13 03:14:16 crc kubenswrapper[5070]: I1213 03:14:16.195527 5070 status_manager.go:851] "Failed to get status for pod" podUID="2782a41c-ce36-4c0d-89c1-27b5e12e9b00" pod="openshift-marketplace/certified-operators-lfs78" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-lfs78\": dial tcp 38.102.83.227:6443: connect: connection refused" Dec 13 03:14:16 crc kubenswrapper[5070]: I1213 03:14:16.195733 5070 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.227:6443: connect: connection refused" Dec 13 03:14:16 crc kubenswrapper[5070]: I1213 03:14:16.196092 5070 status_manager.go:851] "Failed to get status for pod" podUID="6c339fa5-cbb7-4493-ad68-da5f1a046330" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.227:6443: connect: connection refused" Dec 13 03:14:16 crc kubenswrapper[5070]: I1213 03:14:16.196499 5070 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.227:6443: connect: connection refused" Dec 13 03:14:16 crc kubenswrapper[5070]: I1213 03:14:16.261985 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 13 03:14:16 crc kubenswrapper[5070]: I1213 03:14:16.262097 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "audit-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 13 03:14:16 crc kubenswrapper[5070]: I1213 03:14:16.262184 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 13 03:14:16 crc kubenswrapper[5070]: I1213 03:14:16.262227 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 13 03:14:16 crc kubenswrapper[5070]: I1213 03:14:16.262229 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 13 03:14:16 crc kubenswrapper[5070]: I1213 03:14:16.262333 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir" (OuterVolumeSpecName: "cert-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "cert-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 13 03:14:16 crc kubenswrapper[5070]: I1213 03:14:16.262539 5070 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 13 03:14:16 crc kubenswrapper[5070]: I1213 03:14:16.262562 5070 reconciler_common.go:293] "Volume detached for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") on node \"crc\" DevicePath \"\"" Dec 13 03:14:16 crc kubenswrapper[5070]: I1213 03:14:16.262579 5070 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") on node \"crc\" DevicePath \"\"" Dec 13 03:14:17 crc kubenswrapper[5070]: I1213 03:14:17.041365 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 13 03:14:17 crc kubenswrapper[5070]: I1213 03:14:17.042573 5070 scope.go:117] "RemoveContainer" containerID="3c7166d048c7f6f352babbbbff3d8526971042faedd18a600098c5a1edc72db4" Dec 13 03:14:17 crc kubenswrapper[5070]: I1213 03:14:17.042620 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 13 03:14:17 crc kubenswrapper[5070]: I1213 03:14:17.057971 5070 scope.go:117] "RemoveContainer" containerID="4317a72f422d158e2d22658fd3601f8f2da155e957551144bc153bcaca1f4673" Dec 13 03:14:17 crc kubenswrapper[5070]: I1213 03:14:17.059649 5070 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.227:6443: connect: connection refused" Dec 13 03:14:17 crc kubenswrapper[5070]: I1213 03:14:17.060240 5070 status_manager.go:851] "Failed to get status for pod" podUID="6c339fa5-cbb7-4493-ad68-da5f1a046330" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.227:6443: connect: connection refused" Dec 13 03:14:17 crc kubenswrapper[5070]: I1213 03:14:17.061380 5070 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.227:6443: connect: connection refused" Dec 13 03:14:17 crc kubenswrapper[5070]: I1213 03:14:17.061975 5070 status_manager.go:851] "Failed to get status for pod" podUID="b3c6dca7-36c0-4fbc-90b0-a4f91bd588f3" pod="openshift-marketplace/certified-operators-j9rhx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-j9rhx\": dial tcp 38.102.83.227:6443: connect: connection refused" Dec 13 03:14:17 crc kubenswrapper[5070]: I1213 03:14:17.062247 5070 status_manager.go:851] "Failed to get status for pod" podUID="2782a41c-ce36-4c0d-89c1-27b5e12e9b00" pod="openshift-marketplace/certified-operators-lfs78" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-lfs78\": dial tcp 38.102.83.227:6443: connect: connection refused" Dec 13 03:14:17 crc kubenswrapper[5070]: I1213 03:14:17.069974 5070 scope.go:117] "RemoveContainer" containerID="7dfbc973291e3f829acc09c1dc5e465454ba0dc5ab088fc08211a86f51880fba" Dec 13 03:14:17 crc kubenswrapper[5070]: I1213 03:14:17.083888 5070 scope.go:117] "RemoveContainer" containerID="57c12f12bfa97cb3476d9af993bc99bdd178d8d7a2f3d92bac86c096d2729c8f" Dec 13 03:14:17 crc kubenswrapper[5070]: I1213 03:14:17.096512 5070 scope.go:117] "RemoveContainer" containerID="7c109d3244fa0324a8388b5f433ae9d5eb4a8519b6abc901f9fb91284885b605" Dec 13 03:14:17 crc kubenswrapper[5070]: I1213 03:14:17.111587 5070 scope.go:117] "RemoveContainer" containerID="6fe1de36954b63d9bc0b059085237ea841127d94254ef6b5a2a7418077c3e367" Dec 13 03:14:18 crc kubenswrapper[5070]: I1213 03:14:18.169987 5070 status_manager.go:851] "Failed to get status for pod" podUID="b3c6dca7-36c0-4fbc-90b0-a4f91bd588f3" pod="openshift-marketplace/certified-operators-j9rhx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-j9rhx\": dial tcp 38.102.83.227:6443: connect: connection refused" Dec 13 03:14:18 crc kubenswrapper[5070]: I1213 03:14:18.170177 5070 status_manager.go:851] "Failed to get status for pod" podUID="2782a41c-ce36-4c0d-89c1-27b5e12e9b00" 
pod="openshift-marketplace/certified-operators-lfs78" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-lfs78\": dial tcp 38.102.83.227:6443: connect: connection refused" Dec 13 03:14:18 crc kubenswrapper[5070]: I1213 03:14:18.170425 5070 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.227:6443: connect: connection refused" Dec 13 03:14:18 crc kubenswrapper[5070]: I1213 03:14:18.170690 5070 status_manager.go:851] "Failed to get status for pod" podUID="6c339fa5-cbb7-4493-ad68-da5f1a046330" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.227:6443: connect: connection refused" Dec 13 03:14:18 crc kubenswrapper[5070]: I1213 03:14:18.171173 5070 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.227:6443: connect: connection refused" Dec 13 03:14:18 crc kubenswrapper[5070]: I1213 03:14:18.175191 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4b27818a5e8e43d0dc095d08835c792" path="/var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/volumes" Dec 13 03:14:18 crc kubenswrapper[5070]: E1213 03:14:18.955771 5070 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.227:6443: connect: connection refused" Dec 13 03:14:18 crc kubenswrapper[5070]: E1213 03:14:18.956094 5070 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.227:6443: connect: connection refused" Dec 13 03:14:18 crc kubenswrapper[5070]: E1213 03:14:18.956369 5070 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.227:6443: connect: connection refused" Dec 13 03:14:18 crc kubenswrapper[5070]: E1213 03:14:18.956608 5070 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.227:6443: connect: connection refused" Dec 13 03:14:18 crc kubenswrapper[5070]: E1213 03:14:18.962531 5070 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.227:6443: connect: connection refused" Dec 13 03:14:18 crc kubenswrapper[5070]: I1213 03:14:18.962594 5070 controller.go:115] "failed to update lease using latest lease, fallback to ensure lease" err="failed 5 attempts to update lease" Dec 13 03:14:18 crc kubenswrapper[5070]: E1213 03:14:18.963044 5070 controller.go:145] "Failed to ensure lease exists, will retry" err="Get 
\"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.227:6443: connect: connection refused" interval="200ms" Dec 13 03:14:19 crc kubenswrapper[5070]: E1213 03:14:19.164469 5070 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.227:6443: connect: connection refused" interval="400ms" Dec 13 03:14:19 crc kubenswrapper[5070]: E1213 03:14:19.565313 5070 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.227:6443: connect: connection refused" interval="800ms" Dec 13 03:14:20 crc kubenswrapper[5070]: E1213 03:14:20.366714 5070 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.227:6443: connect: connection refused" interval="1.6s" Dec 13 03:14:21 crc kubenswrapper[5070]: E1213 03:14:21.967828 5070 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.227:6443: connect: connection refused" interval="3.2s" Dec 13 03:14:23 crc kubenswrapper[5070]: E1213 03:14:23.721182 5070 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.227:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.1880a7e439d79659 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-13 03:14:12.594046553 +0000 UTC m=+144.829890099,LastTimestamp:2025-12-13 03:14:12.594046553 +0000 UTC m=+144.829890099,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 13 03:14:24 crc kubenswrapper[5070]: I1213 03:14:24.166013 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 13 03:14:24 crc kubenswrapper[5070]: I1213 03:14:24.167033 5070 status_manager.go:851] "Failed to get status for pod" podUID="b3c6dca7-36c0-4fbc-90b0-a4f91bd588f3" pod="openshift-marketplace/certified-operators-j9rhx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-j9rhx\": dial tcp 38.102.83.227:6443: connect: connection refused" Dec 13 03:14:24 crc kubenswrapper[5070]: I1213 03:14:24.167847 5070 status_manager.go:851] "Failed to get status for pod" podUID="2782a41c-ce36-4c0d-89c1-27b5e12e9b00" pod="openshift-marketplace/certified-operators-lfs78" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-lfs78\": dial tcp 38.102.83.227:6443: connect: connection refused" Dec 13 03:14:24 crc kubenswrapper[5070]: I1213 03:14:24.168486 5070 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.227:6443: connect: connection refused" Dec 13 03:14:24 crc kubenswrapper[5070]: I1213 03:14:24.168939 5070 status_manager.go:851] "Failed to get status for pod" podUID="6c339fa5-cbb7-4493-ad68-da5f1a046330" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.227:6443: connect: connection refused" Dec 13 03:14:24 crc kubenswrapper[5070]: I1213 03:14:24.192134 5070 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="6e688eb2-80cc-47e6-9b7b-748d6e6175ff" Dec 13 03:14:24 crc kubenswrapper[5070]: I1213 03:14:24.192173 5070 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="6e688eb2-80cc-47e6-9b7b-748d6e6175ff" Dec 13 03:14:24 crc kubenswrapper[5070]: E1213 03:14:24.192715 5070 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.227:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 13 03:14:24 crc kubenswrapper[5070]: I1213 03:14:24.193358 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 13 03:14:25 crc kubenswrapper[5070]: I1213 03:14:25.087416 5070 generic.go:334] "Generic (PLEG): container finished" podID="71bb4a3aecc4ba5b26c4b7318770ce13" containerID="7134044b22e6eab3fac7ca221055eb6c540f41f79e6a3bdb693400558ce3cdbf" exitCode=0 Dec 13 03:14:25 crc kubenswrapper[5070]: I1213 03:14:25.087738 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerDied","Data":"7134044b22e6eab3fac7ca221055eb6c540f41f79e6a3bdb693400558ce3cdbf"} Dec 13 03:14:25 crc kubenswrapper[5070]: I1213 03:14:25.087773 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"f7cfc205c31d04f3eb5681eefbc0e211a32b11d9e627997543ff17c078004f66"} Dec 13 03:14:25 crc kubenswrapper[5070]: I1213 03:14:25.088030 5070 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="6e688eb2-80cc-47e6-9b7b-748d6e6175ff" Dec 13 03:14:25 crc kubenswrapper[5070]: I1213 03:14:25.088045 5070 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="6e688eb2-80cc-47e6-9b7b-748d6e6175ff" Dec 13 03:14:25 crc kubenswrapper[5070]: I1213 03:14:25.088950 5070 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.227:6443: connect: connection refused" Dec 13 03:14:25 crc kubenswrapper[5070]: E1213 03:14:25.088998 5070 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.227:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 13 03:14:25 crc kubenswrapper[5070]: I1213 03:14:25.089122 5070 status_manager.go:851] "Failed to get status for pod" podUID="6c339fa5-cbb7-4493-ad68-da5f1a046330" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.227:6443: connect: connection refused" Dec 13 03:14:25 crc kubenswrapper[5070]: I1213 03:14:25.089337 5070 status_manager.go:851] "Failed to get status for pod" podUID="b3c6dca7-36c0-4fbc-90b0-a4f91bd588f3" pod="openshift-marketplace/certified-operators-j9rhx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-j9rhx\": dial tcp 38.102.83.227:6443: connect: connection refused" Dec 13 03:14:25 crc kubenswrapper[5070]: I1213 03:14:25.089562 5070 status_manager.go:851] "Failed to get status for pod" podUID="2782a41c-ce36-4c0d-89c1-27b5e12e9b00" pod="openshift-marketplace/certified-operators-lfs78" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-lfs78\": dial tcp 38.102.83.227:6443: connect: connection refused" Dec 13 03:14:25 crc kubenswrapper[5070]: E1213 03:14:25.169174 5070 controller.go:145] "Failed to ensure lease exists, will retry" err="Get 
\"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.227:6443: connect: connection refused" interval="6.4s" Dec 13 03:14:26 crc kubenswrapper[5070]: I1213 03:14:26.094192 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"66cc0f9ffbf257c680cf2a96adfe1e33675647ad32f37d34fe9ac2e4dcdba14d"} Dec 13 03:14:26 crc kubenswrapper[5070]: I1213 03:14:26.094821 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"0f2e679db199e98f91b88b61a355a2f51185ff4a573101ef1f0043c1baad6c27"} Dec 13 03:14:26 crc kubenswrapper[5070]: I1213 03:14:26.094836 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"5657312aedb99145d3bab5a9c8ad565f1ec7b75418a582226a8dc3e7bf1c914e"} Dec 13 03:14:27 crc kubenswrapper[5070]: I1213 03:14:27.103402 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"097feffa425ac625755433f1c3b454671d1b94f45c3cdf7186e8dc791805aa48"} Dec 13 03:14:27 crc kubenswrapper[5070]: I1213 03:14:27.105552 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Dec 13 03:14:27 crc kubenswrapper[5070]: I1213 03:14:27.105695 5070 generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="32c7d4f0ad60bf4cac9e70945fd023c2bbe79250fac7118c090d735ad2965c6d" exitCode=1 Dec 13 03:14:27 crc kubenswrapper[5070]: I1213 03:14:27.105728 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"32c7d4f0ad60bf4cac9e70945fd023c2bbe79250fac7118c090d735ad2965c6d"} Dec 13 03:14:27 crc kubenswrapper[5070]: I1213 03:14:27.106264 5070 scope.go:117] "RemoveContainer" containerID="32c7d4f0ad60bf4cac9e70945fd023c2bbe79250fac7118c090d735ad2965c6d" Dec 13 03:14:28 crc kubenswrapper[5070]: I1213 03:14:28.113481 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"cae1b0c68ff173d6c95d361dc7501bca02f9f8b73b2667c03c3801d566201d90"} Dec 13 03:14:28 crc kubenswrapper[5070]: I1213 03:14:28.113623 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 13 03:14:28 crc kubenswrapper[5070]: I1213 03:14:28.113702 5070 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="6e688eb2-80cc-47e6-9b7b-748d6e6175ff" Dec 13 03:14:28 crc kubenswrapper[5070]: I1213 03:14:28.113728 5070 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="6e688eb2-80cc-47e6-9b7b-748d6e6175ff" Dec 13 03:14:28 crc kubenswrapper[5070]: I1213 03:14:28.116382 5070 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Dec 13 03:14:28 crc kubenswrapper[5070]: I1213 03:14:28.116562 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"77bbb4c5326fbaa6830484aadadfe22599237b58fedeebdf3d506ab70517d69e"} Dec 13 03:14:28 crc kubenswrapper[5070]: I1213 03:14:28.647244 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 13 03:14:28 crc kubenswrapper[5070]: I1213 03:14:28.647756 5070 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/kube-controller-manager namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" start-of-body= Dec 13 03:14:28 crc kubenswrapper[5070]: I1213 03:14:28.647823 5070 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" probeResult="failure" output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" Dec 13 03:14:29 crc kubenswrapper[5070]: I1213 03:14:29.194294 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 13 03:14:29 crc kubenswrapper[5070]: I1213 03:14:29.194565 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 13 03:14:29 crc kubenswrapper[5070]: I1213 03:14:29.198791 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 13 03:14:33 crc kubenswrapper[5070]: I1213 03:14:33.123247 5070 kubelet.go:1914] "Deleted mirror pod because it is outdated" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 13 03:14:33 crc kubenswrapper[5070]: I1213 03:14:33.938273 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-dwkv2" podUID="e0009fee-6991-4819-ae3d-2d075aa961af" containerName="oauth-openshift" containerID="cri-o://5791208f9c365a6b1e4a9fec8efd6b6f1f3ab36edd0c9592694addeff6bfc9d1" gracePeriod=15 Dec 13 03:14:34 crc kubenswrapper[5070]: I1213 03:14:34.157615 5070 generic.go:334] "Generic (PLEG): container finished" podID="e0009fee-6991-4819-ae3d-2d075aa961af" containerID="5791208f9c365a6b1e4a9fec8efd6b6f1f3ab36edd0c9592694addeff6bfc9d1" exitCode=0 Dec 13 03:14:34 crc kubenswrapper[5070]: I1213 03:14:34.157719 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-dwkv2" event={"ID":"e0009fee-6991-4819-ae3d-2d075aa961af","Type":"ContainerDied","Data":"5791208f9c365a6b1e4a9fec8efd6b6f1f3ab36edd0c9592694addeff6bfc9d1"} Dec 13 03:14:34 crc kubenswrapper[5070]: I1213 03:14:34.157973 5070 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="6e688eb2-80cc-47e6-9b7b-748d6e6175ff" Dec 13 03:14:34 crc kubenswrapper[5070]: I1213 03:14:34.157985 5070 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" 
podUID="6e688eb2-80cc-47e6-9b7b-748d6e6175ff" Dec 13 03:14:34 crc kubenswrapper[5070]: I1213 03:14:34.164039 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 13 03:14:34 crc kubenswrapper[5070]: I1213 03:14:34.167228 5070 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="41c9dced-25ef-49ec-9612-00fb535f9f7b" Dec 13 03:14:34 crc kubenswrapper[5070]: I1213 03:14:34.951068 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-dwkv2" Dec 13 03:14:35 crc kubenswrapper[5070]: I1213 03:14:35.006365 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/e0009fee-6991-4819-ae3d-2d075aa961af-v4-0-config-system-service-ca\") pod \"e0009fee-6991-4819-ae3d-2d075aa961af\" (UID: \"e0009fee-6991-4819-ae3d-2d075aa961af\") " Dec 13 03:14:35 crc kubenswrapper[5070]: I1213 03:14:35.006403 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/e0009fee-6991-4819-ae3d-2d075aa961af-v4-0-config-user-template-login\") pod \"e0009fee-6991-4819-ae3d-2d075aa961af\" (UID: \"e0009fee-6991-4819-ae3d-2d075aa961af\") " Dec 13 03:14:35 crc kubenswrapper[5070]: I1213 03:14:35.006431 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nvcfp\" (UniqueName: \"kubernetes.io/projected/e0009fee-6991-4819-ae3d-2d075aa961af-kube-api-access-nvcfp\") pod \"e0009fee-6991-4819-ae3d-2d075aa961af\" (UID: \"e0009fee-6991-4819-ae3d-2d075aa961af\") " Dec 13 03:14:35 crc kubenswrapper[5070]: I1213 03:14:35.006462 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/e0009fee-6991-4819-ae3d-2d075aa961af-v4-0-config-user-template-error\") pod \"e0009fee-6991-4819-ae3d-2d075aa961af\" (UID: \"e0009fee-6991-4819-ae3d-2d075aa961af\") " Dec 13 03:14:35 crc kubenswrapper[5070]: I1213 03:14:35.006483 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/e0009fee-6991-4819-ae3d-2d075aa961af-v4-0-config-user-template-provider-selection\") pod \"e0009fee-6991-4819-ae3d-2d075aa961af\" (UID: \"e0009fee-6991-4819-ae3d-2d075aa961af\") " Dec 13 03:14:35 crc kubenswrapper[5070]: I1213 03:14:35.006531 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/e0009fee-6991-4819-ae3d-2d075aa961af-v4-0-config-system-session\") pod \"e0009fee-6991-4819-ae3d-2d075aa961af\" (UID: \"e0009fee-6991-4819-ae3d-2d075aa961af\") " Dec 13 03:14:35 crc kubenswrapper[5070]: I1213 03:14:35.006560 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/e0009fee-6991-4819-ae3d-2d075aa961af-v4-0-config-system-serving-cert\") pod \"e0009fee-6991-4819-ae3d-2d075aa961af\" (UID: \"e0009fee-6991-4819-ae3d-2d075aa961af\") " Dec 13 03:14:35 crc kubenswrapper[5070]: I1213 03:14:35.006600 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume 
started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/e0009fee-6991-4819-ae3d-2d075aa961af-v4-0-config-system-router-certs\") pod \"e0009fee-6991-4819-ae3d-2d075aa961af\" (UID: \"e0009fee-6991-4819-ae3d-2d075aa961af\") " Dec 13 03:14:35 crc kubenswrapper[5070]: I1213 03:14:35.006638 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/e0009fee-6991-4819-ae3d-2d075aa961af-v4-0-config-system-cliconfig\") pod \"e0009fee-6991-4819-ae3d-2d075aa961af\" (UID: \"e0009fee-6991-4819-ae3d-2d075aa961af\") " Dec 13 03:14:35 crc kubenswrapper[5070]: I1213 03:14:35.006676 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/e0009fee-6991-4819-ae3d-2d075aa961af-v4-0-config-user-idp-0-file-data\") pod \"e0009fee-6991-4819-ae3d-2d075aa961af\" (UID: \"e0009fee-6991-4819-ae3d-2d075aa961af\") " Dec 13 03:14:35 crc kubenswrapper[5070]: I1213 03:14:35.006694 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/e0009fee-6991-4819-ae3d-2d075aa961af-v4-0-config-system-ocp-branding-template\") pod \"e0009fee-6991-4819-ae3d-2d075aa961af\" (UID: \"e0009fee-6991-4819-ae3d-2d075aa961af\") " Dec 13 03:14:35 crc kubenswrapper[5070]: I1213 03:14:35.006742 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e0009fee-6991-4819-ae3d-2d075aa961af-v4-0-config-system-trusted-ca-bundle\") pod \"e0009fee-6991-4819-ae3d-2d075aa961af\" (UID: \"e0009fee-6991-4819-ae3d-2d075aa961af\") " Dec 13 03:14:35 crc kubenswrapper[5070]: I1213 03:14:35.006787 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/e0009fee-6991-4819-ae3d-2d075aa961af-audit-dir\") pod \"e0009fee-6991-4819-ae3d-2d075aa961af\" (UID: \"e0009fee-6991-4819-ae3d-2d075aa961af\") " Dec 13 03:14:35 crc kubenswrapper[5070]: I1213 03:14:35.006817 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/e0009fee-6991-4819-ae3d-2d075aa961af-audit-policies\") pod \"e0009fee-6991-4819-ae3d-2d075aa961af\" (UID: \"e0009fee-6991-4819-ae3d-2d075aa961af\") " Dec 13 03:14:35 crc kubenswrapper[5070]: I1213 03:14:35.008060 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e0009fee-6991-4819-ae3d-2d075aa961af-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "e0009fee-6991-4819-ae3d-2d075aa961af" (UID: "e0009fee-6991-4819-ae3d-2d075aa961af"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:14:35 crc kubenswrapper[5070]: I1213 03:14:35.008115 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e0009fee-6991-4819-ae3d-2d075aa961af-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "e0009fee-6991-4819-ae3d-2d075aa961af" (UID: "e0009fee-6991-4819-ae3d-2d075aa961af"). InnerVolumeSpecName "v4-0-config-system-service-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:14:35 crc kubenswrapper[5070]: I1213 03:14:35.008174 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e0009fee-6991-4819-ae3d-2d075aa961af-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "e0009fee-6991-4819-ae3d-2d075aa961af" (UID: "e0009fee-6991-4819-ae3d-2d075aa961af"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 13 03:14:35 crc kubenswrapper[5070]: I1213 03:14:35.008574 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e0009fee-6991-4819-ae3d-2d075aa961af-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "e0009fee-6991-4819-ae3d-2d075aa961af" (UID: "e0009fee-6991-4819-ae3d-2d075aa961af"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:14:35 crc kubenswrapper[5070]: I1213 03:14:35.009022 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e0009fee-6991-4819-ae3d-2d075aa961af-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "e0009fee-6991-4819-ae3d-2d075aa961af" (UID: "e0009fee-6991-4819-ae3d-2d075aa961af"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:14:35 crc kubenswrapper[5070]: I1213 03:14:35.012781 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e0009fee-6991-4819-ae3d-2d075aa961af-kube-api-access-nvcfp" (OuterVolumeSpecName: "kube-api-access-nvcfp") pod "e0009fee-6991-4819-ae3d-2d075aa961af" (UID: "e0009fee-6991-4819-ae3d-2d075aa961af"). InnerVolumeSpecName "kube-api-access-nvcfp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:14:35 crc kubenswrapper[5070]: I1213 03:14:35.012807 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e0009fee-6991-4819-ae3d-2d075aa961af-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "e0009fee-6991-4819-ae3d-2d075aa961af" (UID: "e0009fee-6991-4819-ae3d-2d075aa961af"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:14:35 crc kubenswrapper[5070]: I1213 03:14:35.013057 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e0009fee-6991-4819-ae3d-2d075aa961af-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "e0009fee-6991-4819-ae3d-2d075aa961af" (UID: "e0009fee-6991-4819-ae3d-2d075aa961af"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:14:35 crc kubenswrapper[5070]: I1213 03:14:35.013417 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e0009fee-6991-4819-ae3d-2d075aa961af-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "e0009fee-6991-4819-ae3d-2d075aa961af" (UID: "e0009fee-6991-4819-ae3d-2d075aa961af"). InnerVolumeSpecName "v4-0-config-user-template-login". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:14:35 crc kubenswrapper[5070]: I1213 03:14:35.013599 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e0009fee-6991-4819-ae3d-2d075aa961af-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "e0009fee-6991-4819-ae3d-2d075aa961af" (UID: "e0009fee-6991-4819-ae3d-2d075aa961af"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:14:35 crc kubenswrapper[5070]: I1213 03:14:35.013789 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e0009fee-6991-4819-ae3d-2d075aa961af-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "e0009fee-6991-4819-ae3d-2d075aa961af" (UID: "e0009fee-6991-4819-ae3d-2d075aa961af"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:14:35 crc kubenswrapper[5070]: I1213 03:14:35.021822 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e0009fee-6991-4819-ae3d-2d075aa961af-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "e0009fee-6991-4819-ae3d-2d075aa961af" (UID: "e0009fee-6991-4819-ae3d-2d075aa961af"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:14:35 crc kubenswrapper[5070]: I1213 03:14:35.022814 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e0009fee-6991-4819-ae3d-2d075aa961af-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "e0009fee-6991-4819-ae3d-2d075aa961af" (UID: "e0009fee-6991-4819-ae3d-2d075aa961af"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:14:35 crc kubenswrapper[5070]: I1213 03:14:35.025554 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e0009fee-6991-4819-ae3d-2d075aa961af-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "e0009fee-6991-4819-ae3d-2d075aa961af" (UID: "e0009fee-6991-4819-ae3d-2d075aa961af"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:14:35 crc kubenswrapper[5070]: I1213 03:14:35.108051 5070 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/e0009fee-6991-4819-ae3d-2d075aa961af-audit-dir\") on node \"crc\" DevicePath \"\"" Dec 13 03:14:35 crc kubenswrapper[5070]: I1213 03:14:35.108092 5070 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/e0009fee-6991-4819-ae3d-2d075aa961af-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 13 03:14:35 crc kubenswrapper[5070]: I1213 03:14:35.108109 5070 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/e0009fee-6991-4819-ae3d-2d075aa961af-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Dec 13 03:14:35 crc kubenswrapper[5070]: I1213 03:14:35.108127 5070 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/e0009fee-6991-4819-ae3d-2d075aa961af-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Dec 13 03:14:35 crc kubenswrapper[5070]: I1213 03:14:35.108141 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nvcfp\" (UniqueName: \"kubernetes.io/projected/e0009fee-6991-4819-ae3d-2d075aa961af-kube-api-access-nvcfp\") on node \"crc\" DevicePath \"\"" Dec 13 03:14:35 crc kubenswrapper[5070]: I1213 03:14:35.108153 5070 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/e0009fee-6991-4819-ae3d-2d075aa961af-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Dec 13 03:14:35 crc kubenswrapper[5070]: I1213 03:14:35.108165 5070 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/e0009fee-6991-4819-ae3d-2d075aa961af-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Dec 13 03:14:35 crc kubenswrapper[5070]: I1213 03:14:35.108177 5070 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/e0009fee-6991-4819-ae3d-2d075aa961af-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Dec 13 03:14:35 crc kubenswrapper[5070]: I1213 03:14:35.108190 5070 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/e0009fee-6991-4819-ae3d-2d075aa961af-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 13 03:14:35 crc kubenswrapper[5070]: I1213 03:14:35.108201 5070 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/e0009fee-6991-4819-ae3d-2d075aa961af-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Dec 13 03:14:35 crc kubenswrapper[5070]: I1213 03:14:35.108213 5070 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/e0009fee-6991-4819-ae3d-2d075aa961af-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Dec 13 03:14:35 crc kubenswrapper[5070]: I1213 03:14:35.108225 5070 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/e0009fee-6991-4819-ae3d-2d075aa961af-v4-0-config-user-idp-0-file-data\") on node \"crc\" 
DevicePath \"\"" Dec 13 03:14:35 crc kubenswrapper[5070]: I1213 03:14:35.108237 5070 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/e0009fee-6991-4819-ae3d-2d075aa961af-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Dec 13 03:14:35 crc kubenswrapper[5070]: I1213 03:14:35.108249 5070 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e0009fee-6991-4819-ae3d-2d075aa961af-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 03:14:35 crc kubenswrapper[5070]: I1213 03:14:35.163833 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-dwkv2" event={"ID":"e0009fee-6991-4819-ae3d-2d075aa961af","Type":"ContainerDied","Data":"30cc032d356b726ab109a95efc70bedfbecea35f8004ce8257621adfc3a0f6f1"} Dec 13 03:14:35 crc kubenswrapper[5070]: I1213 03:14:35.163890 5070 scope.go:117] "RemoveContainer" containerID="5791208f9c365a6b1e4a9fec8efd6b6f1f3ab36edd0c9592694addeff6bfc9d1" Dec 13 03:14:35 crc kubenswrapper[5070]: I1213 03:14:35.163906 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-dwkv2" Dec 13 03:14:35 crc kubenswrapper[5070]: I1213 03:14:35.164034 5070 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="6e688eb2-80cc-47e6-9b7b-748d6e6175ff" Dec 13 03:14:35 crc kubenswrapper[5070]: I1213 03:14:35.164059 5070 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="6e688eb2-80cc-47e6-9b7b-748d6e6175ff" Dec 13 03:14:35 crc kubenswrapper[5070]: E1213 03:14:35.217700 5070 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode0009fee_6991_4819_ae3d_2d075aa961af.slice\": RecentStats: unable to find data in memory cache]" Dec 13 03:14:35 crc kubenswrapper[5070]: I1213 03:14:35.361747 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 13 03:14:38 crc kubenswrapper[5070]: I1213 03:14:38.191647 5070 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="41c9dced-25ef-49ec-9612-00fb535f9f7b" Dec 13 03:14:38 crc kubenswrapper[5070]: I1213 03:14:38.651838 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 13 03:14:38 crc kubenswrapper[5070]: I1213 03:14:38.658736 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 13 03:14:43 crc kubenswrapper[5070]: I1213 03:14:43.379171 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Dec 13 03:14:43 crc kubenswrapper[5070]: I1213 03:14:43.544045 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Dec 13 03:14:43 crc kubenswrapper[5070]: I1213 03:14:43.706204 5070 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-cluster-version"/"openshift-service-ca.crt" Dec 13 03:14:43 crc kubenswrapper[5070]: I1213 03:14:43.793195 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Dec 13 03:14:43 crc kubenswrapper[5070]: I1213 03:14:43.907995 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Dec 13 03:14:44 crc kubenswrapper[5070]: I1213 03:14:44.004023 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Dec 13 03:14:44 crc kubenswrapper[5070]: I1213 03:14:44.345113 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Dec 13 03:14:44 crc kubenswrapper[5070]: I1213 03:14:44.590196 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Dec 13 03:14:44 crc kubenswrapper[5070]: I1213 03:14:44.637358 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Dec 13 03:14:44 crc kubenswrapper[5070]: I1213 03:14:44.641000 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Dec 13 03:14:44 crc kubenswrapper[5070]: I1213 03:14:44.715613 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Dec 13 03:14:44 crc kubenswrapper[5070]: I1213 03:14:44.865946 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Dec 13 03:14:44 crc kubenswrapper[5070]: I1213 03:14:44.950886 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Dec 13 03:14:45 crc kubenswrapper[5070]: I1213 03:14:45.120679 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Dec 13 03:14:45 crc kubenswrapper[5070]: I1213 03:14:45.237142 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Dec 13 03:14:45 crc kubenswrapper[5070]: I1213 03:14:45.269321 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Dec 13 03:14:45 crc kubenswrapper[5070]: I1213 03:14:45.276202 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Dec 13 03:14:45 crc kubenswrapper[5070]: I1213 03:14:45.466609 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Dec 13 03:14:45 crc kubenswrapper[5070]: I1213 03:14:45.540509 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Dec 13 03:14:46 crc kubenswrapper[5070]: I1213 03:14:46.039299 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Dec 13 03:14:46 crc kubenswrapper[5070]: I1213 03:14:46.211331 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Dec 13 03:14:46 crc kubenswrapper[5070]: I1213 03:14:46.243530 5070 reflector.go:368] 
Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Dec 13 03:14:46 crc kubenswrapper[5070]: I1213 03:14:46.348783 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Dec 13 03:14:46 crc kubenswrapper[5070]: I1213 03:14:46.387923 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Dec 13 03:14:46 crc kubenswrapper[5070]: I1213 03:14:46.523166 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Dec 13 03:14:46 crc kubenswrapper[5070]: I1213 03:14:46.533580 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Dec 13 03:14:46 crc kubenswrapper[5070]: I1213 03:14:46.544002 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 13 03:14:46 crc kubenswrapper[5070]: I1213 03:14:46.641709 5070 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Dec 13 03:14:46 crc kubenswrapper[5070]: I1213 03:14:46.646329 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Dec 13 03:14:46 crc kubenswrapper[5070]: I1213 03:14:46.673252 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Dec 13 03:14:46 crc kubenswrapper[5070]: I1213 03:14:46.699242 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Dec 13 03:14:46 crc kubenswrapper[5070]: I1213 03:14:46.816240 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Dec 13 03:14:46 crc kubenswrapper[5070]: I1213 03:14:46.892065 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Dec 13 03:14:46 crc kubenswrapper[5070]: I1213 03:14:46.895012 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Dec 13 03:14:46 crc kubenswrapper[5070]: I1213 03:14:46.973990 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Dec 13 03:14:47 crc kubenswrapper[5070]: I1213 03:14:47.172239 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Dec 13 03:14:47 crc kubenswrapper[5070]: I1213 03:14:47.243532 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Dec 13 03:14:47 crc kubenswrapper[5070]: I1213 03:14:47.250037 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Dec 13 03:14:47 crc kubenswrapper[5070]: I1213 03:14:47.427276 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Dec 13 03:14:47 crc kubenswrapper[5070]: I1213 03:14:47.532786 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Dec 13 03:14:47 crc kubenswrapper[5070]: I1213 03:14:47.542959 5070 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Dec 13 03:14:47 crc kubenswrapper[5070]: I1213 03:14:47.565129 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Dec 13 03:14:47 crc kubenswrapper[5070]: I1213 03:14:47.810017 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Dec 13 03:14:47 crc kubenswrapper[5070]: I1213 03:14:47.815625 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Dec 13 03:14:47 crc kubenswrapper[5070]: I1213 03:14:47.849676 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Dec 13 03:14:47 crc kubenswrapper[5070]: I1213 03:14:47.866219 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Dec 13 03:14:47 crc kubenswrapper[5070]: I1213 03:14:47.892352 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Dec 13 03:14:48 crc kubenswrapper[5070]: I1213 03:14:48.015720 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Dec 13 03:14:48 crc kubenswrapper[5070]: I1213 03:14:48.015982 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Dec 13 03:14:48 crc kubenswrapper[5070]: I1213 03:14:48.016576 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Dec 13 03:14:48 crc kubenswrapper[5070]: I1213 03:14:48.042592 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 13 03:14:48 crc kubenswrapper[5070]: I1213 03:14:48.074568 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Dec 13 03:14:48 crc kubenswrapper[5070]: I1213 03:14:48.106361 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Dec 13 03:14:48 crc kubenswrapper[5070]: I1213 03:14:48.193979 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Dec 13 03:14:48 crc kubenswrapper[5070]: I1213 03:14:48.219553 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Dec 13 03:14:48 crc kubenswrapper[5070]: I1213 03:14:48.240162 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Dec 13 03:14:48 crc kubenswrapper[5070]: I1213 03:14:48.241546 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Dec 13 03:14:48 crc kubenswrapper[5070]: I1213 03:14:48.331044 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Dec 13 03:14:48 crc kubenswrapper[5070]: I1213 03:14:48.454825 5070 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-config-operator"/"openshift-service-ca.crt" Dec 13 03:14:48 crc kubenswrapper[5070]: I1213 03:14:48.492584 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Dec 13 03:14:48 crc kubenswrapper[5070]: I1213 03:14:48.622186 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Dec 13 03:14:48 crc kubenswrapper[5070]: I1213 03:14:48.650644 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Dec 13 03:14:48 crc kubenswrapper[5070]: I1213 03:14:48.696097 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Dec 13 03:14:48 crc kubenswrapper[5070]: I1213 03:14:48.698878 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Dec 13 03:14:48 crc kubenswrapper[5070]: I1213 03:14:48.778171 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Dec 13 03:14:48 crc kubenswrapper[5070]: I1213 03:14:48.875911 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Dec 13 03:14:48 crc kubenswrapper[5070]: I1213 03:14:48.910858 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 13 03:14:48 crc kubenswrapper[5070]: I1213 03:14:48.974559 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Dec 13 03:14:49 crc kubenswrapper[5070]: I1213 03:14:49.006566 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Dec 13 03:14:49 crc kubenswrapper[5070]: I1213 03:14:49.115727 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Dec 13 03:14:49 crc kubenswrapper[5070]: I1213 03:14:49.212171 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Dec 13 03:14:49 crc kubenswrapper[5070]: I1213 03:14:49.216000 5070 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Dec 13 03:14:49 crc kubenswrapper[5070]: I1213 03:14:49.217908 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podStartSLOduration=37.217896186 podStartE2EDuration="37.217896186s" podCreationTimestamp="2025-12-13 03:14:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 03:14:32.131885058 +0000 UTC m=+164.367728614" watchObservedRunningTime="2025-12-13 03:14:49.217896186 +0000 UTC m=+181.453739732" Dec 13 03:14:49 crc kubenswrapper[5070]: I1213 03:14:49.219962 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc","openshift-authentication/oauth-openshift-558db77b4-dwkv2"] Dec 13 03:14:49 crc kubenswrapper[5070]: I1213 03:14:49.219994 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Dec 13 03:14:49 crc kubenswrapper[5070]: 
I1213 03:14:49.220012 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 13 03:14:49 crc kubenswrapper[5070]: I1213 03:14:49.223733 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 13 03:14:49 crc kubenswrapper[5070]: I1213 03:14:49.237656 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=16.237642038 podStartE2EDuration="16.237642038s" podCreationTimestamp="2025-12-13 03:14:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 03:14:49.234682483 +0000 UTC m=+181.470526029" watchObservedRunningTime="2025-12-13 03:14:49.237642038 +0000 UTC m=+181.473485594" Dec 13 03:14:49 crc kubenswrapper[5070]: I1213 03:14:49.259612 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Dec 13 03:14:49 crc kubenswrapper[5070]: I1213 03:14:49.273028 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Dec 13 03:14:49 crc kubenswrapper[5070]: I1213 03:14:49.283007 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Dec 13 03:14:49 crc kubenswrapper[5070]: I1213 03:14:49.314802 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Dec 13 03:14:49 crc kubenswrapper[5070]: I1213 03:14:49.374281 5070 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Dec 13 03:14:49 crc kubenswrapper[5070]: I1213 03:14:49.420749 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Dec 13 03:14:49 crc kubenswrapper[5070]: I1213 03:14:49.423189 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Dec 13 03:14:49 crc kubenswrapper[5070]: I1213 03:14:49.427290 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Dec 13 03:14:49 crc kubenswrapper[5070]: I1213 03:14:49.427454 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Dec 13 03:14:49 crc kubenswrapper[5070]: I1213 03:14:49.476664 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Dec 13 03:14:49 crc kubenswrapper[5070]: I1213 03:14:49.507705 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Dec 13 03:14:49 crc kubenswrapper[5070]: I1213 03:14:49.532782 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Dec 13 03:14:49 crc kubenswrapper[5070]: I1213 03:14:49.544243 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Dec 13 03:14:49 crc kubenswrapper[5070]: I1213 03:14:49.584792 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Dec 13 03:14:49 crc kubenswrapper[5070]: I1213 03:14:49.585652 5070 reflector.go:368] Caches populated for *v1.Secret 
from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Dec 13 03:14:49 crc kubenswrapper[5070]: I1213 03:14:49.586102 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Dec 13 03:14:49 crc kubenswrapper[5070]: I1213 03:14:49.756316 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 13 03:14:49 crc kubenswrapper[5070]: I1213 03:14:49.765633 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Dec 13 03:14:49 crc kubenswrapper[5070]: I1213 03:14:49.782505 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Dec 13 03:14:49 crc kubenswrapper[5070]: I1213 03:14:49.822296 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Dec 13 03:14:49 crc kubenswrapper[5070]: I1213 03:14:49.841100 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Dec 13 03:14:49 crc kubenswrapper[5070]: I1213 03:14:49.857984 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Dec 13 03:14:49 crc kubenswrapper[5070]: I1213 03:14:49.868768 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Dec 13 03:14:49 crc kubenswrapper[5070]: I1213 03:14:49.876243 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Dec 13 03:14:49 crc kubenswrapper[5070]: I1213 03:14:49.983274 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Dec 13 03:14:50 crc kubenswrapper[5070]: I1213 03:14:50.052037 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Dec 13 03:14:50 crc kubenswrapper[5070]: I1213 03:14:50.172849 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e0009fee-6991-4819-ae3d-2d075aa961af" path="/var/lib/kubelet/pods/e0009fee-6991-4819-ae3d-2d075aa961af/volumes" Dec 13 03:14:50 crc kubenswrapper[5070]: I1213 03:14:50.277575 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Dec 13 03:14:50 crc kubenswrapper[5070]: I1213 03:14:50.300317 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Dec 13 03:14:50 crc kubenswrapper[5070]: I1213 03:14:50.325604 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Dec 13 03:14:50 crc kubenswrapper[5070]: I1213 03:14:50.336120 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Dec 13 03:14:50 crc kubenswrapper[5070]: I1213 03:14:50.361716 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Dec 13 03:14:50 crc kubenswrapper[5070]: I1213 03:14:50.373562 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Dec 13 03:14:50 crc kubenswrapper[5070]: I1213 03:14:50.384529 5070 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Dec 13 03:14:50 crc kubenswrapper[5070]: I1213 03:14:50.401881 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Dec 13 03:14:50 crc kubenswrapper[5070]: I1213 03:14:50.470969 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Dec 13 03:14:50 crc kubenswrapper[5070]: I1213 03:14:50.532611 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Dec 13 03:14:50 crc kubenswrapper[5070]: I1213 03:14:50.558041 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Dec 13 03:14:50 crc kubenswrapper[5070]: I1213 03:14:50.563271 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Dec 13 03:14:50 crc kubenswrapper[5070]: I1213 03:14:50.650025 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Dec 13 03:14:50 crc kubenswrapper[5070]: I1213 03:14:50.836223 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Dec 13 03:14:50 crc kubenswrapper[5070]: I1213 03:14:50.861420 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 13 03:14:50 crc kubenswrapper[5070]: I1213 03:14:50.865382 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 13 03:14:50 crc kubenswrapper[5070]: I1213 03:14:50.924241 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Dec 13 03:14:51 crc kubenswrapper[5070]: I1213 03:14:51.004723 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Dec 13 03:14:51 crc kubenswrapper[5070]: I1213 03:14:51.024910 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Dec 13 03:14:51 crc kubenswrapper[5070]: I1213 03:14:51.102811 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Dec 13 03:14:51 crc kubenswrapper[5070]: I1213 03:14:51.171780 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Dec 13 03:14:51 crc kubenswrapper[5070]: I1213 03:14:51.191133 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Dec 13 03:14:51 crc kubenswrapper[5070]: I1213 03:14:51.220909 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Dec 13 03:14:51 crc kubenswrapper[5070]: I1213 03:14:51.225028 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 13 03:14:51 crc kubenswrapper[5070]: I1213 03:14:51.382317 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Dec 13 03:14:51 crc 
kubenswrapper[5070]: I1213 03:14:51.443934 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Dec 13 03:14:51 crc kubenswrapper[5070]: I1213 03:14:51.501213 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Dec 13 03:14:51 crc kubenswrapper[5070]: I1213 03:14:51.518151 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Dec 13 03:14:51 crc kubenswrapper[5070]: I1213 03:14:51.535947 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Dec 13 03:14:51 crc kubenswrapper[5070]: I1213 03:14:51.562766 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 13 03:14:51 crc kubenswrapper[5070]: I1213 03:14:51.675471 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Dec 13 03:14:51 crc kubenswrapper[5070]: I1213 03:14:51.714373 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Dec 13 03:14:51 crc kubenswrapper[5070]: I1213 03:14:51.862989 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Dec 13 03:14:51 crc kubenswrapper[5070]: I1213 03:14:51.942539 5070 patch_prober.go:28] interesting pod/machine-config-daemon-9l4rb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 13 03:14:51 crc kubenswrapper[5070]: I1213 03:14:51.942603 5070 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 13 03:14:51 crc kubenswrapper[5070]: I1213 03:14:51.963013 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Dec 13 03:14:51 crc kubenswrapper[5070]: I1213 03:14:51.969668 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Dec 13 03:14:52 crc kubenswrapper[5070]: I1213 03:14:52.064086 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Dec 13 03:14:52 crc kubenswrapper[5070]: I1213 03:14:52.077336 5070 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Dec 13 03:14:52 crc kubenswrapper[5070]: I1213 03:14:52.106415 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Dec 13 03:14:52 crc kubenswrapper[5070]: I1213 03:14:52.138389 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Dec 13 03:14:52 crc kubenswrapper[5070]: I1213 03:14:52.236930 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 13 03:14:52 crc 
kubenswrapper[5070]: I1213 03:14:52.237608 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Dec 13 03:14:52 crc kubenswrapper[5070]: I1213 03:14:52.288481 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Dec 13 03:14:52 crc kubenswrapper[5070]: I1213 03:14:52.405762 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Dec 13 03:14:52 crc kubenswrapper[5070]: I1213 03:14:52.454202 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Dec 13 03:14:52 crc kubenswrapper[5070]: I1213 03:14:52.489742 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Dec 13 03:14:52 crc kubenswrapper[5070]: I1213 03:14:52.535027 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Dec 13 03:14:52 crc kubenswrapper[5070]: I1213 03:14:52.627575 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Dec 13 03:14:52 crc kubenswrapper[5070]: I1213 03:14:52.627583 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Dec 13 03:14:52 crc kubenswrapper[5070]: I1213 03:14:52.640250 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Dec 13 03:14:52 crc kubenswrapper[5070]: I1213 03:14:52.770640 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Dec 13 03:14:52 crc kubenswrapper[5070]: I1213 03:14:52.779411 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Dec 13 03:14:52 crc kubenswrapper[5070]: I1213 03:14:52.859844 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Dec 13 03:14:52 crc kubenswrapper[5070]: I1213 03:14:52.970278 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Dec 13 03:14:52 crc kubenswrapper[5070]: I1213 03:14:52.998880 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Dec 13 03:14:53 crc kubenswrapper[5070]: I1213 03:14:53.068849 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Dec 13 03:14:53 crc kubenswrapper[5070]: I1213 03:14:53.107886 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Dec 13 03:14:53 crc kubenswrapper[5070]: I1213 03:14:53.149433 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Dec 13 03:14:53 crc kubenswrapper[5070]: I1213 03:14:53.171994 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Dec 13 03:14:53 crc kubenswrapper[5070]: I1213 03:14:53.219020 5070 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Dec 13 03:14:53 crc kubenswrapper[5070]: I1213 03:14:53.241175 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Dec 13 03:14:53 crc kubenswrapper[5070]: I1213 03:14:53.406569 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Dec 13 03:14:53 crc kubenswrapper[5070]: I1213 03:14:53.409338 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Dec 13 03:14:53 crc kubenswrapper[5070]: I1213 03:14:53.513171 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Dec 13 03:14:53 crc kubenswrapper[5070]: I1213 03:14:53.601735 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Dec 13 03:14:53 crc kubenswrapper[5070]: I1213 03:14:53.603861 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Dec 13 03:14:53 crc kubenswrapper[5070]: I1213 03:14:53.605844 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Dec 13 03:14:53 crc kubenswrapper[5070]: I1213 03:14:53.642813 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Dec 13 03:14:53 crc kubenswrapper[5070]: I1213 03:14:53.653014 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 13 03:14:53 crc kubenswrapper[5070]: I1213 03:14:53.678910 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 13 03:14:53 crc kubenswrapper[5070]: I1213 03:14:53.684649 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Dec 13 03:14:53 crc kubenswrapper[5070]: I1213 03:14:53.705501 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Dec 13 03:14:53 crc kubenswrapper[5070]: I1213 03:14:53.705723 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 13 03:14:53 crc kubenswrapper[5070]: I1213 03:14:53.707002 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Dec 13 03:14:53 crc kubenswrapper[5070]: I1213 03:14:53.762118 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Dec 13 03:14:53 crc kubenswrapper[5070]: I1213 03:14:53.762180 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Dec 13 03:14:53 crc kubenswrapper[5070]: I1213 03:14:53.803080 5070 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Dec 13 03:14:53 crc kubenswrapper[5070]: I1213 03:14:53.822643 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 13 03:14:53 crc kubenswrapper[5070]: I1213 03:14:53.899076 5070 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-dns"/"openshift-service-ca.crt" Dec 13 03:14:53 crc kubenswrapper[5070]: I1213 03:14:53.936266 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Dec 13 03:14:53 crc kubenswrapper[5070]: I1213 03:14:53.994719 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Dec 13 03:14:54 crc kubenswrapper[5070]: I1213 03:14:54.024652 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Dec 13 03:14:54 crc kubenswrapper[5070]: I1213 03:14:54.038023 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Dec 13 03:14:54 crc kubenswrapper[5070]: I1213 03:14:54.086380 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Dec 13 03:14:54 crc kubenswrapper[5070]: I1213 03:14:54.193992 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Dec 13 03:14:54 crc kubenswrapper[5070]: I1213 03:14:54.229678 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Dec 13 03:14:54 crc kubenswrapper[5070]: I1213 03:14:54.276091 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Dec 13 03:14:54 crc kubenswrapper[5070]: I1213 03:14:54.288138 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Dec 13 03:14:54 crc kubenswrapper[5070]: I1213 03:14:54.351893 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 13 03:14:54 crc kubenswrapper[5070]: I1213 03:14:54.390687 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Dec 13 03:14:54 crc kubenswrapper[5070]: I1213 03:14:54.406220 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Dec 13 03:14:54 crc kubenswrapper[5070]: I1213 03:14:54.462149 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Dec 13 03:14:54 crc kubenswrapper[5070]: I1213 03:14:54.546292 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Dec 13 03:14:54 crc kubenswrapper[5070]: I1213 03:14:54.598762 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Dec 13 03:14:54 crc kubenswrapper[5070]: I1213 03:14:54.598908 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Dec 13 03:14:54 crc kubenswrapper[5070]: I1213 03:14:54.618257 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-58b6cd7fd8-66tvt"] Dec 13 03:14:54 crc kubenswrapper[5070]: E1213 03:14:54.618696 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e0009fee-6991-4819-ae3d-2d075aa961af" containerName="oauth-openshift" Dec 13 03:14:54 crc kubenswrapper[5070]: I1213 03:14:54.618741 5070 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="e0009fee-6991-4819-ae3d-2d075aa961af" containerName="oauth-openshift" Dec 13 03:14:54 crc kubenswrapper[5070]: E1213 03:14:54.618781 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c339fa5-cbb7-4493-ad68-da5f1a046330" containerName="installer" Dec 13 03:14:54 crc kubenswrapper[5070]: I1213 03:14:54.618799 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c339fa5-cbb7-4493-ad68-da5f1a046330" containerName="installer" Dec 13 03:14:54 crc kubenswrapper[5070]: I1213 03:14:54.619066 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="e0009fee-6991-4819-ae3d-2d075aa961af" containerName="oauth-openshift" Dec 13 03:14:54 crc kubenswrapper[5070]: I1213 03:14:54.619114 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="6c339fa5-cbb7-4493-ad68-da5f1a046330" containerName="installer" Dec 13 03:14:54 crc kubenswrapper[5070]: I1213 03:14:54.619956 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-58b6cd7fd8-66tvt" Dec 13 03:14:54 crc kubenswrapper[5070]: I1213 03:14:54.623415 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Dec 13 03:14:54 crc kubenswrapper[5070]: I1213 03:14:54.624118 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Dec 13 03:14:54 crc kubenswrapper[5070]: I1213 03:14:54.624753 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Dec 13 03:14:54 crc kubenswrapper[5070]: I1213 03:14:54.624902 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Dec 13 03:14:54 crc kubenswrapper[5070]: I1213 03:14:54.625001 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Dec 13 03:14:54 crc kubenswrapper[5070]: I1213 03:14:54.625053 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Dec 13 03:14:54 crc kubenswrapper[5070]: I1213 03:14:54.624922 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Dec 13 03:14:54 crc kubenswrapper[5070]: I1213 03:14:54.625223 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Dec 13 03:14:54 crc kubenswrapper[5070]: I1213 03:14:54.625269 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Dec 13 03:14:54 crc kubenswrapper[5070]: I1213 03:14:54.625581 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Dec 13 03:14:54 crc kubenswrapper[5070]: I1213 03:14:54.625645 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Dec 13 03:14:54 crc kubenswrapper[5070]: I1213 03:14:54.628417 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Dec 13 03:14:54 crc kubenswrapper[5070]: I1213 03:14:54.634995 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Dec 13 03:14:54 crc 
kubenswrapper[5070]: I1213 03:14:54.639802 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Dec 13 03:14:54 crc kubenswrapper[5070]: I1213 03:14:54.641255 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-58b6cd7fd8-66tvt"] Dec 13 03:14:54 crc kubenswrapper[5070]: I1213 03:14:54.645406 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Dec 13 03:14:54 crc kubenswrapper[5070]: I1213 03:14:54.668520 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/9ab99833-4028-4cb4-9361-00a889c030ef-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-58b6cd7fd8-66tvt\" (UID: \"9ab99833-4028-4cb4-9361-00a889c030ef\") " pod="openshift-authentication/oauth-openshift-58b6cd7fd8-66tvt" Dec 13 03:14:54 crc kubenswrapper[5070]: I1213 03:14:54.668705 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/9ab99833-4028-4cb4-9361-00a889c030ef-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-58b6cd7fd8-66tvt\" (UID: \"9ab99833-4028-4cb4-9361-00a889c030ef\") " pod="openshift-authentication/oauth-openshift-58b6cd7fd8-66tvt" Dec 13 03:14:54 crc kubenswrapper[5070]: I1213 03:14:54.668775 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2g9sm\" (UniqueName: \"kubernetes.io/projected/9ab99833-4028-4cb4-9361-00a889c030ef-kube-api-access-2g9sm\") pod \"oauth-openshift-58b6cd7fd8-66tvt\" (UID: \"9ab99833-4028-4cb4-9361-00a889c030ef\") " pod="openshift-authentication/oauth-openshift-58b6cd7fd8-66tvt" Dec 13 03:14:54 crc kubenswrapper[5070]: I1213 03:14:54.668808 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/9ab99833-4028-4cb4-9361-00a889c030ef-v4-0-config-system-service-ca\") pod \"oauth-openshift-58b6cd7fd8-66tvt\" (UID: \"9ab99833-4028-4cb4-9361-00a889c030ef\") " pod="openshift-authentication/oauth-openshift-58b6cd7fd8-66tvt" Dec 13 03:14:54 crc kubenswrapper[5070]: I1213 03:14:54.668956 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/9ab99833-4028-4cb4-9361-00a889c030ef-v4-0-config-system-router-certs\") pod \"oauth-openshift-58b6cd7fd8-66tvt\" (UID: \"9ab99833-4028-4cb4-9361-00a889c030ef\") " pod="openshift-authentication/oauth-openshift-58b6cd7fd8-66tvt" Dec 13 03:14:54 crc kubenswrapper[5070]: I1213 03:14:54.669324 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/9ab99833-4028-4cb4-9361-00a889c030ef-audit-dir\") pod \"oauth-openshift-58b6cd7fd8-66tvt\" (UID: \"9ab99833-4028-4cb4-9361-00a889c030ef\") " pod="openshift-authentication/oauth-openshift-58b6cd7fd8-66tvt" Dec 13 03:14:54 crc kubenswrapper[5070]: I1213 03:14:54.669781 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: 
\"kubernetes.io/secret/9ab99833-4028-4cb4-9361-00a889c030ef-v4-0-config-system-session\") pod \"oauth-openshift-58b6cd7fd8-66tvt\" (UID: \"9ab99833-4028-4cb4-9361-00a889c030ef\") " pod="openshift-authentication/oauth-openshift-58b6cd7fd8-66tvt" Dec 13 03:14:54 crc kubenswrapper[5070]: I1213 03:14:54.669829 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/9ab99833-4028-4cb4-9361-00a889c030ef-audit-policies\") pod \"oauth-openshift-58b6cd7fd8-66tvt\" (UID: \"9ab99833-4028-4cb4-9361-00a889c030ef\") " pod="openshift-authentication/oauth-openshift-58b6cd7fd8-66tvt" Dec 13 03:14:54 crc kubenswrapper[5070]: I1213 03:14:54.669916 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/9ab99833-4028-4cb4-9361-00a889c030ef-v4-0-config-system-cliconfig\") pod \"oauth-openshift-58b6cd7fd8-66tvt\" (UID: \"9ab99833-4028-4cb4-9361-00a889c030ef\") " pod="openshift-authentication/oauth-openshift-58b6cd7fd8-66tvt" Dec 13 03:14:54 crc kubenswrapper[5070]: I1213 03:14:54.669984 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/9ab99833-4028-4cb4-9361-00a889c030ef-v4-0-config-user-template-login\") pod \"oauth-openshift-58b6cd7fd8-66tvt\" (UID: \"9ab99833-4028-4cb4-9361-00a889c030ef\") " pod="openshift-authentication/oauth-openshift-58b6cd7fd8-66tvt" Dec 13 03:14:54 crc kubenswrapper[5070]: I1213 03:14:54.670053 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/9ab99833-4028-4cb4-9361-00a889c030ef-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-58b6cd7fd8-66tvt\" (UID: \"9ab99833-4028-4cb4-9361-00a889c030ef\") " pod="openshift-authentication/oauth-openshift-58b6cd7fd8-66tvt" Dec 13 03:14:54 crc kubenswrapper[5070]: I1213 03:14:54.670135 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/9ab99833-4028-4cb4-9361-00a889c030ef-v4-0-config-system-serving-cert\") pod \"oauth-openshift-58b6cd7fd8-66tvt\" (UID: \"9ab99833-4028-4cb4-9361-00a889c030ef\") " pod="openshift-authentication/oauth-openshift-58b6cd7fd8-66tvt" Dec 13 03:14:54 crc kubenswrapper[5070]: I1213 03:14:54.670260 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/9ab99833-4028-4cb4-9361-00a889c030ef-v4-0-config-user-template-error\") pod \"oauth-openshift-58b6cd7fd8-66tvt\" (UID: \"9ab99833-4028-4cb4-9361-00a889c030ef\") " pod="openshift-authentication/oauth-openshift-58b6cd7fd8-66tvt" Dec 13 03:14:54 crc kubenswrapper[5070]: I1213 03:14:54.670304 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9ab99833-4028-4cb4-9361-00a889c030ef-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-58b6cd7fd8-66tvt\" (UID: \"9ab99833-4028-4cb4-9361-00a889c030ef\") " pod="openshift-authentication/oauth-openshift-58b6cd7fd8-66tvt" Dec 13 03:14:54 crc kubenswrapper[5070]: I1213 03:14:54.699591 5070 
reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Dec 13 03:14:54 crc kubenswrapper[5070]: I1213 03:14:54.709757 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Dec 13 03:14:54 crc kubenswrapper[5070]: I1213 03:14:54.743370 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Dec 13 03:14:54 crc kubenswrapper[5070]: I1213 03:14:54.760686 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Dec 13 03:14:54 crc kubenswrapper[5070]: I1213 03:14:54.769957 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Dec 13 03:14:54 crc kubenswrapper[5070]: I1213 03:14:54.771576 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/9ab99833-4028-4cb4-9361-00a889c030ef-v4-0-config-system-serving-cert\") pod \"oauth-openshift-58b6cd7fd8-66tvt\" (UID: \"9ab99833-4028-4cb4-9361-00a889c030ef\") " pod="openshift-authentication/oauth-openshift-58b6cd7fd8-66tvt" Dec 13 03:14:54 crc kubenswrapper[5070]: I1213 03:14:54.771626 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/9ab99833-4028-4cb4-9361-00a889c030ef-v4-0-config-user-template-error\") pod \"oauth-openshift-58b6cd7fd8-66tvt\" (UID: \"9ab99833-4028-4cb4-9361-00a889c030ef\") " pod="openshift-authentication/oauth-openshift-58b6cd7fd8-66tvt" Dec 13 03:14:54 crc kubenswrapper[5070]: I1213 03:14:54.771665 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9ab99833-4028-4cb4-9361-00a889c030ef-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-58b6cd7fd8-66tvt\" (UID: \"9ab99833-4028-4cb4-9361-00a889c030ef\") " pod="openshift-authentication/oauth-openshift-58b6cd7fd8-66tvt" Dec 13 03:14:54 crc kubenswrapper[5070]: I1213 03:14:54.771693 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/9ab99833-4028-4cb4-9361-00a889c030ef-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-58b6cd7fd8-66tvt\" (UID: \"9ab99833-4028-4cb4-9361-00a889c030ef\") " pod="openshift-authentication/oauth-openshift-58b6cd7fd8-66tvt" Dec 13 03:14:54 crc kubenswrapper[5070]: I1213 03:14:54.771728 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/9ab99833-4028-4cb4-9361-00a889c030ef-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-58b6cd7fd8-66tvt\" (UID: \"9ab99833-4028-4cb4-9361-00a889c030ef\") " pod="openshift-authentication/oauth-openshift-58b6cd7fd8-66tvt" Dec 13 03:14:54 crc kubenswrapper[5070]: I1213 03:14:54.771753 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2g9sm\" (UniqueName: \"kubernetes.io/projected/9ab99833-4028-4cb4-9361-00a889c030ef-kube-api-access-2g9sm\") pod \"oauth-openshift-58b6cd7fd8-66tvt\" (UID: \"9ab99833-4028-4cb4-9361-00a889c030ef\") " 
pod="openshift-authentication/oauth-openshift-58b6cd7fd8-66tvt" Dec 13 03:14:54 crc kubenswrapper[5070]: I1213 03:14:54.771778 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/9ab99833-4028-4cb4-9361-00a889c030ef-v4-0-config-system-service-ca\") pod \"oauth-openshift-58b6cd7fd8-66tvt\" (UID: \"9ab99833-4028-4cb4-9361-00a889c030ef\") " pod="openshift-authentication/oauth-openshift-58b6cd7fd8-66tvt" Dec 13 03:14:54 crc kubenswrapper[5070]: I1213 03:14:54.771807 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/9ab99833-4028-4cb4-9361-00a889c030ef-v4-0-config-system-router-certs\") pod \"oauth-openshift-58b6cd7fd8-66tvt\" (UID: \"9ab99833-4028-4cb4-9361-00a889c030ef\") " pod="openshift-authentication/oauth-openshift-58b6cd7fd8-66tvt" Dec 13 03:14:54 crc kubenswrapper[5070]: I1213 03:14:54.771832 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/9ab99833-4028-4cb4-9361-00a889c030ef-audit-dir\") pod \"oauth-openshift-58b6cd7fd8-66tvt\" (UID: \"9ab99833-4028-4cb4-9361-00a889c030ef\") " pod="openshift-authentication/oauth-openshift-58b6cd7fd8-66tvt" Dec 13 03:14:54 crc kubenswrapper[5070]: I1213 03:14:54.771878 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/9ab99833-4028-4cb4-9361-00a889c030ef-v4-0-config-system-session\") pod \"oauth-openshift-58b6cd7fd8-66tvt\" (UID: \"9ab99833-4028-4cb4-9361-00a889c030ef\") " pod="openshift-authentication/oauth-openshift-58b6cd7fd8-66tvt" Dec 13 03:14:54 crc kubenswrapper[5070]: I1213 03:14:54.771971 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/9ab99833-4028-4cb4-9361-00a889c030ef-audit-dir\") pod \"oauth-openshift-58b6cd7fd8-66tvt\" (UID: \"9ab99833-4028-4cb4-9361-00a889c030ef\") " pod="openshift-authentication/oauth-openshift-58b6cd7fd8-66tvt" Dec 13 03:14:54 crc kubenswrapper[5070]: I1213 03:14:54.772054 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/9ab99833-4028-4cb4-9361-00a889c030ef-audit-policies\") pod \"oauth-openshift-58b6cd7fd8-66tvt\" (UID: \"9ab99833-4028-4cb4-9361-00a889c030ef\") " pod="openshift-authentication/oauth-openshift-58b6cd7fd8-66tvt" Dec 13 03:14:54 crc kubenswrapper[5070]: I1213 03:14:54.772158 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/9ab99833-4028-4cb4-9361-00a889c030ef-v4-0-config-system-cliconfig\") pod \"oauth-openshift-58b6cd7fd8-66tvt\" (UID: \"9ab99833-4028-4cb4-9361-00a889c030ef\") " pod="openshift-authentication/oauth-openshift-58b6cd7fd8-66tvt" Dec 13 03:14:54 crc kubenswrapper[5070]: I1213 03:14:54.772202 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/9ab99833-4028-4cb4-9361-00a889c030ef-v4-0-config-user-template-login\") pod \"oauth-openshift-58b6cd7fd8-66tvt\" (UID: \"9ab99833-4028-4cb4-9361-00a889c030ef\") " pod="openshift-authentication/oauth-openshift-58b6cd7fd8-66tvt" Dec 13 03:14:54 crc kubenswrapper[5070]: I1213 03:14:54.772240 5070 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/9ab99833-4028-4cb4-9361-00a889c030ef-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-58b6cd7fd8-66tvt\" (UID: \"9ab99833-4028-4cb4-9361-00a889c030ef\") " pod="openshift-authentication/oauth-openshift-58b6cd7fd8-66tvt" Dec 13 03:14:54 crc kubenswrapper[5070]: I1213 03:14:54.772617 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/9ab99833-4028-4cb4-9361-00a889c030ef-v4-0-config-system-service-ca\") pod \"oauth-openshift-58b6cd7fd8-66tvt\" (UID: \"9ab99833-4028-4cb4-9361-00a889c030ef\") " pod="openshift-authentication/oauth-openshift-58b6cd7fd8-66tvt" Dec 13 03:14:54 crc kubenswrapper[5070]: I1213 03:14:54.772753 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9ab99833-4028-4cb4-9361-00a889c030ef-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-58b6cd7fd8-66tvt\" (UID: \"9ab99833-4028-4cb4-9361-00a889c030ef\") " pod="openshift-authentication/oauth-openshift-58b6cd7fd8-66tvt" Dec 13 03:14:54 crc kubenswrapper[5070]: I1213 03:14:54.772945 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/9ab99833-4028-4cb4-9361-00a889c030ef-audit-policies\") pod \"oauth-openshift-58b6cd7fd8-66tvt\" (UID: \"9ab99833-4028-4cb4-9361-00a889c030ef\") " pod="openshift-authentication/oauth-openshift-58b6cd7fd8-66tvt" Dec 13 03:14:54 crc kubenswrapper[5070]: I1213 03:14:54.773283 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/9ab99833-4028-4cb4-9361-00a889c030ef-v4-0-config-system-cliconfig\") pod \"oauth-openshift-58b6cd7fd8-66tvt\" (UID: \"9ab99833-4028-4cb4-9361-00a889c030ef\") " pod="openshift-authentication/oauth-openshift-58b6cd7fd8-66tvt" Dec 13 03:14:54 crc kubenswrapper[5070]: I1213 03:14:54.777423 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/9ab99833-4028-4cb4-9361-00a889c030ef-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-58b6cd7fd8-66tvt\" (UID: \"9ab99833-4028-4cb4-9361-00a889c030ef\") " pod="openshift-authentication/oauth-openshift-58b6cd7fd8-66tvt" Dec 13 03:14:54 crc kubenswrapper[5070]: I1213 03:14:54.777967 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/9ab99833-4028-4cb4-9361-00a889c030ef-v4-0-config-user-template-error\") pod \"oauth-openshift-58b6cd7fd8-66tvt\" (UID: \"9ab99833-4028-4cb4-9361-00a889c030ef\") " pod="openshift-authentication/oauth-openshift-58b6cd7fd8-66tvt" Dec 13 03:14:54 crc kubenswrapper[5070]: I1213 03:14:54.778082 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/9ab99833-4028-4cb4-9361-00a889c030ef-v4-0-config-system-session\") pod \"oauth-openshift-58b6cd7fd8-66tvt\" (UID: \"9ab99833-4028-4cb4-9361-00a889c030ef\") " pod="openshift-authentication/oauth-openshift-58b6cd7fd8-66tvt" Dec 13 03:14:54 crc kubenswrapper[5070]: I1213 03:14:54.778145 5070 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/9ab99833-4028-4cb4-9361-00a889c030ef-v4-0-config-system-router-certs\") pod \"oauth-openshift-58b6cd7fd8-66tvt\" (UID: \"9ab99833-4028-4cb4-9361-00a889c030ef\") " pod="openshift-authentication/oauth-openshift-58b6cd7fd8-66tvt" Dec 13 03:14:54 crc kubenswrapper[5070]: I1213 03:14:54.778183 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/9ab99833-4028-4cb4-9361-00a889c030ef-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-58b6cd7fd8-66tvt\" (UID: \"9ab99833-4028-4cb4-9361-00a889c030ef\") " pod="openshift-authentication/oauth-openshift-58b6cd7fd8-66tvt" Dec 13 03:14:54 crc kubenswrapper[5070]: I1213 03:14:54.778956 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/9ab99833-4028-4cb4-9361-00a889c030ef-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-58b6cd7fd8-66tvt\" (UID: \"9ab99833-4028-4cb4-9361-00a889c030ef\") " pod="openshift-authentication/oauth-openshift-58b6cd7fd8-66tvt" Dec 13 03:14:54 crc kubenswrapper[5070]: I1213 03:14:54.779780 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/9ab99833-4028-4cb4-9361-00a889c030ef-v4-0-config-user-template-login\") pod \"oauth-openshift-58b6cd7fd8-66tvt\" (UID: \"9ab99833-4028-4cb4-9361-00a889c030ef\") " pod="openshift-authentication/oauth-openshift-58b6cd7fd8-66tvt" Dec 13 03:14:54 crc kubenswrapper[5070]: I1213 03:14:54.786235 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/9ab99833-4028-4cb4-9361-00a889c030ef-v4-0-config-system-serving-cert\") pod \"oauth-openshift-58b6cd7fd8-66tvt\" (UID: \"9ab99833-4028-4cb4-9361-00a889c030ef\") " pod="openshift-authentication/oauth-openshift-58b6cd7fd8-66tvt" Dec 13 03:14:54 crc kubenswrapper[5070]: I1213 03:14:54.794865 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2g9sm\" (UniqueName: \"kubernetes.io/projected/9ab99833-4028-4cb4-9361-00a889c030ef-kube-api-access-2g9sm\") pod \"oauth-openshift-58b6cd7fd8-66tvt\" (UID: \"9ab99833-4028-4cb4-9361-00a889c030ef\") " pod="openshift-authentication/oauth-openshift-58b6cd7fd8-66tvt" Dec 13 03:14:54 crc kubenswrapper[5070]: I1213 03:14:54.813265 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Dec 13 03:14:54 crc kubenswrapper[5070]: I1213 03:14:54.815111 5070 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 13 03:14:54 crc kubenswrapper[5070]: I1213 03:14:54.815314 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" containerID="cri-o://c45b32a10a0c667ecc3fb1c2df88dce51460bc06b86c232efce02f89a2850437" gracePeriod=5 Dec 13 03:14:54 crc kubenswrapper[5070]: I1213 03:14:54.826893 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Dec 13 03:14:54 crc kubenswrapper[5070]: I1213 03:14:54.842424 5070 reflector.go:368] Caches populated for 
*v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Dec 13 03:14:54 crc kubenswrapper[5070]: I1213 03:14:54.851337 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Dec 13 03:14:54 crc kubenswrapper[5070]: I1213 03:14:54.880414 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Dec 13 03:14:54 crc kubenswrapper[5070]: I1213 03:14:54.975079 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-58b6cd7fd8-66tvt" Dec 13 03:14:55 crc kubenswrapper[5070]: I1213 03:14:55.068129 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Dec 13 03:14:55 crc kubenswrapper[5070]: I1213 03:14:55.090644 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Dec 13 03:14:55 crc kubenswrapper[5070]: I1213 03:14:55.111695 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 13 03:14:55 crc kubenswrapper[5070]: I1213 03:14:55.141409 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Dec 13 03:14:55 crc kubenswrapper[5070]: I1213 03:14:55.390891 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-58b6cd7fd8-66tvt"] Dec 13 03:14:55 crc kubenswrapper[5070]: I1213 03:14:55.600363 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Dec 13 03:14:55 crc kubenswrapper[5070]: I1213 03:14:55.627764 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Dec 13 03:14:55 crc kubenswrapper[5070]: I1213 03:14:55.829014 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Dec 13 03:14:55 crc kubenswrapper[5070]: I1213 03:14:55.851845 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 13 03:14:55 crc kubenswrapper[5070]: I1213 03:14:55.889350 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Dec 13 03:14:55 crc kubenswrapper[5070]: I1213 03:14:55.893105 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Dec 13 03:14:55 crc kubenswrapper[5070]: I1213 03:14:55.988061 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Dec 13 03:14:56 crc kubenswrapper[5070]: I1213 03:14:56.097389 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Dec 13 03:14:56 crc kubenswrapper[5070]: I1213 03:14:56.280224 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-58b6cd7fd8-66tvt" event={"ID":"9ab99833-4028-4cb4-9361-00a889c030ef","Type":"ContainerStarted","Data":"e7e6c2b15b406ce7591954e872062d9cfb8ad8d292ade7b4d0feb41470154fe0"} Dec 13 03:14:56 crc kubenswrapper[5070]: I1213 03:14:56.280270 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-authentication/oauth-openshift-58b6cd7fd8-66tvt" event={"ID":"9ab99833-4028-4cb4-9361-00a889c030ef","Type":"ContainerStarted","Data":"c646af7727fb70c70f401856db03ff451842b28504858950d8853a86115e9157"} Dec 13 03:14:56 crc kubenswrapper[5070]: I1213 03:14:56.280579 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-58b6cd7fd8-66tvt" Dec 13 03:14:56 crc kubenswrapper[5070]: I1213 03:14:56.287748 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-58b6cd7fd8-66tvt" Dec 13 03:14:56 crc kubenswrapper[5070]: I1213 03:14:56.289103 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Dec 13 03:14:56 crc kubenswrapper[5070]: I1213 03:14:56.307384 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-58b6cd7fd8-66tvt" podStartSLOduration=48.307367238 podStartE2EDuration="48.307367238s" podCreationTimestamp="2025-12-13 03:14:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 03:14:56.300109852 +0000 UTC m=+188.535953408" watchObservedRunningTime="2025-12-13 03:14:56.307367238 +0000 UTC m=+188.543210794" Dec 13 03:14:56 crc kubenswrapper[5070]: I1213 03:14:56.310573 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Dec 13 03:14:56 crc kubenswrapper[5070]: I1213 03:14:56.452712 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Dec 13 03:14:56 crc kubenswrapper[5070]: I1213 03:14:56.465200 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Dec 13 03:14:56 crc kubenswrapper[5070]: I1213 03:14:56.609641 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Dec 13 03:14:56 crc kubenswrapper[5070]: I1213 03:14:56.720345 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Dec 13 03:14:56 crc kubenswrapper[5070]: I1213 03:14:56.758740 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Dec 13 03:14:56 crc kubenswrapper[5070]: I1213 03:14:56.760120 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Dec 13 03:14:57 crc kubenswrapper[5070]: I1213 03:14:57.080653 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Dec 13 03:14:57 crc kubenswrapper[5070]: I1213 03:14:57.498863 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Dec 13 03:14:57 crc kubenswrapper[5070]: I1213 03:14:57.504294 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Dec 13 03:14:57 crc kubenswrapper[5070]: I1213 03:14:57.605603 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 13 03:14:57 crc kubenswrapper[5070]: I1213 03:14:57.930973 5070 reflector.go:368] Caches populated for 
*v1.ConfigMap from object-"openshift-apiserver"/"config" Dec 13 03:14:58 crc kubenswrapper[5070]: I1213 03:14:58.818911 5070 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Dec 13 03:14:59 crc kubenswrapper[5070]: I1213 03:14:59.052936 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Dec 13 03:14:59 crc kubenswrapper[5070]: I1213 03:14:59.106330 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Dec 13 03:14:59 crc kubenswrapper[5070]: I1213 03:14:59.212397 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Dec 13 03:14:59 crc kubenswrapper[5070]: I1213 03:14:59.246273 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Dec 13 03:14:59 crc kubenswrapper[5070]: I1213 03:14:59.655231 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Dec 13 03:14:59 crc kubenswrapper[5070]: I1213 03:14:59.749517 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Dec 13 03:15:00 crc kubenswrapper[5070]: I1213 03:15:00.180535 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29426595-c87m7"] Dec 13 03:15:00 crc kubenswrapper[5070]: E1213 03:15:00.180774 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Dec 13 03:15:00 crc kubenswrapper[5070]: I1213 03:15:00.180788 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Dec 13 03:15:00 crc kubenswrapper[5070]: I1213 03:15:00.180908 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Dec 13 03:15:00 crc kubenswrapper[5070]: I1213 03:15:00.181347 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29426595-c87m7" Dec 13 03:15:00 crc kubenswrapper[5070]: I1213 03:15:00.183751 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 13 03:15:00 crc kubenswrapper[5070]: I1213 03:15:00.183906 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 13 03:15:00 crc kubenswrapper[5070]: I1213 03:15:00.201312 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29426595-c87m7"] Dec 13 03:15:00 crc kubenswrapper[5070]: I1213 03:15:00.239221 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/22cbbb70-41e0-476b-9640-48c5c997cf72-secret-volume\") pod \"collect-profiles-29426595-c87m7\" (UID: \"22cbbb70-41e0-476b-9640-48c5c997cf72\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426595-c87m7" Dec 13 03:15:00 crc kubenswrapper[5070]: I1213 03:15:00.239267 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dtdmt\" (UniqueName: \"kubernetes.io/projected/22cbbb70-41e0-476b-9640-48c5c997cf72-kube-api-access-dtdmt\") pod \"collect-profiles-29426595-c87m7\" (UID: \"22cbbb70-41e0-476b-9640-48c5c997cf72\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426595-c87m7" Dec 13 03:15:00 crc kubenswrapper[5070]: I1213 03:15:00.239316 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/22cbbb70-41e0-476b-9640-48c5c997cf72-config-volume\") pod \"collect-profiles-29426595-c87m7\" (UID: \"22cbbb70-41e0-476b-9640-48c5c997cf72\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426595-c87m7" Dec 13 03:15:00 crc kubenswrapper[5070]: I1213 03:15:00.271035 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Dec 13 03:15:00 crc kubenswrapper[5070]: I1213 03:15:00.304014 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Dec 13 03:15:00 crc kubenswrapper[5070]: I1213 03:15:00.304087 5070 generic.go:334] "Generic (PLEG): container finished" podID="f85e55b1a89d02b0cb034b1ea31ed45a" containerID="c45b32a10a0c667ecc3fb1c2df88dce51460bc06b86c232efce02f89a2850437" exitCode=137 Dec 13 03:15:00 crc kubenswrapper[5070]: I1213 03:15:00.340693 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/22cbbb70-41e0-476b-9640-48c5c997cf72-secret-volume\") pod \"collect-profiles-29426595-c87m7\" (UID: \"22cbbb70-41e0-476b-9640-48c5c997cf72\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426595-c87m7" Dec 13 03:15:00 crc kubenswrapper[5070]: I1213 03:15:00.340739 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dtdmt\" (UniqueName: \"kubernetes.io/projected/22cbbb70-41e0-476b-9640-48c5c997cf72-kube-api-access-dtdmt\") pod \"collect-profiles-29426595-c87m7\" (UID: \"22cbbb70-41e0-476b-9640-48c5c997cf72\") " 
pod="openshift-operator-lifecycle-manager/collect-profiles-29426595-c87m7" Dec 13 03:15:00 crc kubenswrapper[5070]: I1213 03:15:00.340787 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/22cbbb70-41e0-476b-9640-48c5c997cf72-config-volume\") pod \"collect-profiles-29426595-c87m7\" (UID: \"22cbbb70-41e0-476b-9640-48c5c997cf72\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426595-c87m7" Dec 13 03:15:00 crc kubenswrapper[5070]: I1213 03:15:00.341963 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/22cbbb70-41e0-476b-9640-48c5c997cf72-config-volume\") pod \"collect-profiles-29426595-c87m7\" (UID: \"22cbbb70-41e0-476b-9640-48c5c997cf72\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426595-c87m7" Dec 13 03:15:00 crc kubenswrapper[5070]: I1213 03:15:00.348901 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/22cbbb70-41e0-476b-9640-48c5c997cf72-secret-volume\") pod \"collect-profiles-29426595-c87m7\" (UID: \"22cbbb70-41e0-476b-9640-48c5c997cf72\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426595-c87m7" Dec 13 03:15:00 crc kubenswrapper[5070]: I1213 03:15:00.361779 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dtdmt\" (UniqueName: \"kubernetes.io/projected/22cbbb70-41e0-476b-9640-48c5c997cf72-kube-api-access-dtdmt\") pod \"collect-profiles-29426595-c87m7\" (UID: \"22cbbb70-41e0-476b-9640-48c5c997cf72\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426595-c87m7" Dec 13 03:15:00 crc kubenswrapper[5070]: I1213 03:15:00.422093 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Dec 13 03:15:00 crc kubenswrapper[5070]: I1213 03:15:00.422173 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 13 03:15:00 crc kubenswrapper[5070]: I1213 03:15:00.505715 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29426595-c87m7" Dec 13 03:15:00 crc kubenswrapper[5070]: I1213 03:15:00.523573 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Dec 13 03:15:00 crc kubenswrapper[5070]: I1213 03:15:00.542590 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 13 03:15:00 crc kubenswrapper[5070]: I1213 03:15:00.542728 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log" (OuterVolumeSpecName: "var-log") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-log". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 13 03:15:00 crc kubenswrapper[5070]: I1213 03:15:00.543244 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests" (OuterVolumeSpecName: "manifests") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "manifests". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 13 03:15:00 crc kubenswrapper[5070]: I1213 03:15:00.543155 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 13 03:15:00 crc kubenswrapper[5070]: I1213 03:15:00.543396 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 13 03:15:00 crc kubenswrapper[5070]: I1213 03:15:00.543539 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 13 03:15:00 crc kubenswrapper[5070]: I1213 03:15:00.543603 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 13 03:15:00 crc kubenswrapper[5070]: I1213 03:15:00.543741 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 13 03:15:00 crc kubenswrapper[5070]: I1213 03:15:00.543859 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock" (OuterVolumeSpecName: "var-lock") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-lock". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 13 03:15:00 crc kubenswrapper[5070]: I1213 03:15:00.544079 5070 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 13 03:15:00 crc kubenswrapper[5070]: I1213 03:15:00.544177 5070 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") on node \"crc\" DevicePath \"\"" Dec 13 03:15:00 crc kubenswrapper[5070]: I1213 03:15:00.544196 5070 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") on node \"crc\" DevicePath \"\"" Dec 13 03:15:00 crc kubenswrapper[5070]: I1213 03:15:00.544213 5070 reconciler_common.go:293] "Volume detached for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") on node \"crc\" DevicePath \"\"" Dec 13 03:15:00 crc kubenswrapper[5070]: I1213 03:15:00.551946 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir" (OuterVolumeSpecName: "pod-resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "pod-resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 13 03:15:00 crc kubenswrapper[5070]: I1213 03:15:00.644601 5070 reconciler_common.go:293] "Volume detached for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 13 03:15:00 crc kubenswrapper[5070]: I1213 03:15:00.885726 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29426595-c87m7"] Dec 13 03:15:00 crc kubenswrapper[5070]: W1213 03:15:00.896526 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod22cbbb70_41e0_476b_9640_48c5c997cf72.slice/crio-d652379125d06942626fe6eb99a3c79aed043646ac7970fa8f583c13de0edf3b WatchSource:0}: Error finding container d652379125d06942626fe6eb99a3c79aed043646ac7970fa8f583c13de0edf3b: Status 404 returned error can't find the container with id d652379125d06942626fe6eb99a3c79aed043646ac7970fa8f583c13de0edf3b Dec 13 03:15:01 crc kubenswrapper[5070]: I1213 03:15:01.312410 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Dec 13 03:15:01 crc kubenswrapper[5070]: I1213 03:15:01.312847 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 13 03:15:01 crc kubenswrapper[5070]: I1213 03:15:01.312868 5070 scope.go:117] "RemoveContainer" containerID="c45b32a10a0c667ecc3fb1c2df88dce51460bc06b86c232efce02f89a2850437" Dec 13 03:15:01 crc kubenswrapper[5070]: I1213 03:15:01.317977 5070 generic.go:334] "Generic (PLEG): container finished" podID="22cbbb70-41e0-476b-9640-48c5c997cf72" containerID="66724e9d54ec859a3275ff82318d31d12435ce09922594453e332eaa87b9f4da" exitCode=0 Dec 13 03:15:01 crc kubenswrapper[5070]: I1213 03:15:01.318031 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29426595-c87m7" event={"ID":"22cbbb70-41e0-476b-9640-48c5c997cf72","Type":"ContainerDied","Data":"66724e9d54ec859a3275ff82318d31d12435ce09922594453e332eaa87b9f4da"} Dec 13 03:15:01 crc kubenswrapper[5070]: I1213 03:15:01.318068 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29426595-c87m7" event={"ID":"22cbbb70-41e0-476b-9640-48c5c997cf72","Type":"ContainerStarted","Data":"d652379125d06942626fe6eb99a3c79aed043646ac7970fa8f583c13de0edf3b"} Dec 13 03:15:02 crc kubenswrapper[5070]: I1213 03:15:02.171992 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" path="/var/lib/kubelet/pods/f85e55b1a89d02b0cb034b1ea31ed45a/volumes" Dec 13 03:15:02 crc kubenswrapper[5070]: I1213 03:15:02.172220 5070 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="" Dec 13 03:15:02 crc kubenswrapper[5070]: I1213 03:15:02.182311 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 13 03:15:02 crc kubenswrapper[5070]: I1213 03:15:02.182351 5070 kubelet.go:2649] "Unable to find pod for mirror pod, skipping" mirrorPod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" mirrorPodUID="0bf54feb-a2d9-48bc-b512-dde3d45ba591" Dec 13 03:15:02 crc kubenswrapper[5070]: I1213 03:15:02.185519 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 13 03:15:02 crc kubenswrapper[5070]: I1213 03:15:02.185555 5070 kubelet.go:2673] "Unable to find pod for mirror pod, skipping" mirrorPod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" mirrorPodUID="0bf54feb-a2d9-48bc-b512-dde3d45ba591" Dec 13 03:15:02 crc kubenswrapper[5070]: I1213 03:15:02.532768 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29426595-c87m7" Dec 13 03:15:02 crc kubenswrapper[5070]: I1213 03:15:02.674099 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dtdmt\" (UniqueName: \"kubernetes.io/projected/22cbbb70-41e0-476b-9640-48c5c997cf72-kube-api-access-dtdmt\") pod \"22cbbb70-41e0-476b-9640-48c5c997cf72\" (UID: \"22cbbb70-41e0-476b-9640-48c5c997cf72\") " Dec 13 03:15:02 crc kubenswrapper[5070]: I1213 03:15:02.674196 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/22cbbb70-41e0-476b-9640-48c5c997cf72-secret-volume\") pod \"22cbbb70-41e0-476b-9640-48c5c997cf72\" (UID: \"22cbbb70-41e0-476b-9640-48c5c997cf72\") " Dec 13 03:15:02 crc kubenswrapper[5070]: I1213 03:15:02.674234 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/22cbbb70-41e0-476b-9640-48c5c997cf72-config-volume\") pod \"22cbbb70-41e0-476b-9640-48c5c997cf72\" (UID: \"22cbbb70-41e0-476b-9640-48c5c997cf72\") " Dec 13 03:15:02 crc kubenswrapper[5070]: I1213 03:15:02.675310 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22cbbb70-41e0-476b-9640-48c5c997cf72-config-volume" (OuterVolumeSpecName: "config-volume") pod "22cbbb70-41e0-476b-9640-48c5c997cf72" (UID: "22cbbb70-41e0-476b-9640-48c5c997cf72"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:15:02 crc kubenswrapper[5070]: I1213 03:15:02.678888 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22cbbb70-41e0-476b-9640-48c5c997cf72-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "22cbbb70-41e0-476b-9640-48c5c997cf72" (UID: "22cbbb70-41e0-476b-9640-48c5c997cf72"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:15:02 crc kubenswrapper[5070]: I1213 03:15:02.678897 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22cbbb70-41e0-476b-9640-48c5c997cf72-kube-api-access-dtdmt" (OuterVolumeSpecName: "kube-api-access-dtdmt") pod "22cbbb70-41e0-476b-9640-48c5c997cf72" (UID: "22cbbb70-41e0-476b-9640-48c5c997cf72"). InnerVolumeSpecName "kube-api-access-dtdmt". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:15:02 crc kubenswrapper[5070]: I1213 03:15:02.776055 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dtdmt\" (UniqueName: \"kubernetes.io/projected/22cbbb70-41e0-476b-9640-48c5c997cf72-kube-api-access-dtdmt\") on node \"crc\" DevicePath \"\"" Dec 13 03:15:02 crc kubenswrapper[5070]: I1213 03:15:02.776374 5070 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/22cbbb70-41e0-476b-9640-48c5c997cf72-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 13 03:15:02 crc kubenswrapper[5070]: I1213 03:15:02.776387 5070 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/22cbbb70-41e0-476b-9640-48c5c997cf72-config-volume\") on node \"crc\" DevicePath \"\"" Dec 13 03:15:03 crc kubenswrapper[5070]: I1213 03:15:03.333028 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29426595-c87m7" event={"ID":"22cbbb70-41e0-476b-9640-48c5c997cf72","Type":"ContainerDied","Data":"d652379125d06942626fe6eb99a3c79aed043646ac7970fa8f583c13de0edf3b"} Dec 13 03:15:03 crc kubenswrapper[5070]: I1213 03:15:03.333082 5070 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d652379125d06942626fe6eb99a3c79aed043646ac7970fa8f583c13de0edf3b" Dec 13 03:15:03 crc kubenswrapper[5070]: I1213 03:15:03.333149 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29426595-c87m7" Dec 13 03:15:07 crc kubenswrapper[5070]: I1213 03:15:07.043667 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-j9rhx"] Dec 13 03:15:07 crc kubenswrapper[5070]: I1213 03:15:07.044509 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-j9rhx" podUID="b3c6dca7-36c0-4fbc-90b0-a4f91bd588f3" containerName="registry-server" containerID="cri-o://e3f7a276cafef522541a276a57264707493cd7afd7d3d8ffda3193c324cb8ebc" gracePeriod=30 Dec 13 03:15:07 crc kubenswrapper[5070]: I1213 03:15:07.053539 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-lfs78"] Dec 13 03:15:07 crc kubenswrapper[5070]: I1213 03:15:07.053839 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-lfs78" podUID="2782a41c-ce36-4c0d-89c1-27b5e12e9b00" containerName="registry-server" containerID="cri-o://8cfe61d6e66a78b04addb35438357158f34a2d1fc5f3803f7d398cc303ed9906" gracePeriod=30 Dec 13 03:15:07 crc kubenswrapper[5070]: I1213 03:15:07.061022 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-rx625"] Dec 13 03:15:07 crc kubenswrapper[5070]: I1213 03:15:07.061369 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-rx625" podUID="9e4db140-cd43-41ac-a1d7-1913a66ba814" containerName="registry-server" containerID="cri-o://70505c61ef9186ee3955480b750c15bde15e44a281eaaebf9afe00fbc281bf84" gracePeriod=30 Dec 13 03:15:07 crc kubenswrapper[5070]: I1213 03:15:07.098587 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-vblq5"] Dec 13 03:15:07 crc kubenswrapper[5070]: I1213 03:15:07.098863 5070 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-vblq5" podUID="d84fad97-769a-4f5d-8e19-d91d308675f6" containerName="marketplace-operator" containerID="cri-o://ffa26983f13e36b661fade44743a4771d311e4f13e25d9ad6e235d19ded6eb15" gracePeriod=30 Dec 13 03:15:07 crc kubenswrapper[5070]: I1213 03:15:07.101648 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-h5qcs"] Dec 13 03:15:07 crc kubenswrapper[5070]: I1213 03:15:07.101961 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-h5qcs" podUID="a88af4a7-bdb6-4fca-b7fb-c6fe7f85bc4a" containerName="registry-server" containerID="cri-o://925341df345ffead9caedd9d0bfb54d060debde6b0770aae06c516f364e657bc" gracePeriod=30 Dec 13 03:15:07 crc kubenswrapper[5070]: I1213 03:15:07.107619 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-pszz7"] Dec 13 03:15:07 crc kubenswrapper[5070]: E1213 03:15:07.107930 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="22cbbb70-41e0-476b-9640-48c5c997cf72" containerName="collect-profiles" Dec 13 03:15:07 crc kubenswrapper[5070]: I1213 03:15:07.107951 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="22cbbb70-41e0-476b-9640-48c5c997cf72" containerName="collect-profiles" Dec 13 03:15:07 crc kubenswrapper[5070]: I1213 03:15:07.108089 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="22cbbb70-41e0-476b-9640-48c5c997cf72" containerName="collect-profiles" Dec 13 03:15:07 crc kubenswrapper[5070]: I1213 03:15:07.108559 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-pszz7" Dec 13 03:15:07 crc kubenswrapper[5070]: I1213 03:15:07.108943 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-q5mgn"] Dec 13 03:15:07 crc kubenswrapper[5070]: I1213 03:15:07.109225 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-q5mgn" podUID="383cb754-9d42-43dd-9cb7-5238fec04ce5" containerName="registry-server" containerID="cri-o://1717ee3a3fc150d71953d6f734511d584df1e81d94146f3e66a682c2a16fb0f4" gracePeriod=30 Dec 13 03:15:07 crc kubenswrapper[5070]: I1213 03:15:07.113562 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-pszz7"] Dec 13 03:15:07 crc kubenswrapper[5070]: I1213 03:15:07.145141 5070 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-vblq5 container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.35:8080/healthz\": dial tcp 10.217.0.35:8080: connect: connection refused" start-of-body= Dec 13 03:15:07 crc kubenswrapper[5070]: I1213 03:15:07.145221 5070 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-vblq5" podUID="d84fad97-769a-4f5d-8e19-d91d308675f6" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.35:8080/healthz\": dial tcp 10.217.0.35:8080: connect: connection refused" Dec 13 03:15:07 crc kubenswrapper[5070]: I1213 03:15:07.242678 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: 
\"kubernetes.io/secret/e6b959a6-f715-46c4-9192-c9c372246129-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-pszz7\" (UID: \"e6b959a6-f715-46c4-9192-c9c372246129\") " pod="openshift-marketplace/marketplace-operator-79b997595-pszz7" Dec 13 03:15:07 crc kubenswrapper[5070]: I1213 03:15:07.242776 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e6b959a6-f715-46c4-9192-c9c372246129-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-pszz7\" (UID: \"e6b959a6-f715-46c4-9192-c9c372246129\") " pod="openshift-marketplace/marketplace-operator-79b997595-pszz7" Dec 13 03:15:07 crc kubenswrapper[5070]: I1213 03:15:07.242831 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4wsls\" (UniqueName: \"kubernetes.io/projected/e6b959a6-f715-46c4-9192-c9c372246129-kube-api-access-4wsls\") pod \"marketplace-operator-79b997595-pszz7\" (UID: \"e6b959a6-f715-46c4-9192-c9c372246129\") " pod="openshift-marketplace/marketplace-operator-79b997595-pszz7" Dec 13 03:15:07 crc kubenswrapper[5070]: I1213 03:15:07.344125 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e6b959a6-f715-46c4-9192-c9c372246129-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-pszz7\" (UID: \"e6b959a6-f715-46c4-9192-c9c372246129\") " pod="openshift-marketplace/marketplace-operator-79b997595-pszz7" Dec 13 03:15:07 crc kubenswrapper[5070]: I1213 03:15:07.344246 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4wsls\" (UniqueName: \"kubernetes.io/projected/e6b959a6-f715-46c4-9192-c9c372246129-kube-api-access-4wsls\") pod \"marketplace-operator-79b997595-pszz7\" (UID: \"e6b959a6-f715-46c4-9192-c9c372246129\") " pod="openshift-marketplace/marketplace-operator-79b997595-pszz7" Dec 13 03:15:07 crc kubenswrapper[5070]: I1213 03:15:07.344281 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/e6b959a6-f715-46c4-9192-c9c372246129-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-pszz7\" (UID: \"e6b959a6-f715-46c4-9192-c9c372246129\") " pod="openshift-marketplace/marketplace-operator-79b997595-pszz7" Dec 13 03:15:07 crc kubenswrapper[5070]: I1213 03:15:07.345544 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e6b959a6-f715-46c4-9192-c9c372246129-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-pszz7\" (UID: \"e6b959a6-f715-46c4-9192-c9c372246129\") " pod="openshift-marketplace/marketplace-operator-79b997595-pszz7" Dec 13 03:15:07 crc kubenswrapper[5070]: I1213 03:15:07.357977 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/e6b959a6-f715-46c4-9192-c9c372246129-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-pszz7\" (UID: \"e6b959a6-f715-46c4-9192-c9c372246129\") " pod="openshift-marketplace/marketplace-operator-79b997595-pszz7" Dec 13 03:15:07 crc kubenswrapper[5070]: I1213 03:15:07.368865 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4wsls\" (UniqueName: 
\"kubernetes.io/projected/e6b959a6-f715-46c4-9192-c9c372246129-kube-api-access-4wsls\") pod \"marketplace-operator-79b997595-pszz7\" (UID: \"e6b959a6-f715-46c4-9192-c9c372246129\") " pod="openshift-marketplace/marketplace-operator-79b997595-pszz7" Dec 13 03:15:07 crc kubenswrapper[5070]: I1213 03:15:07.439132 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-pszz7" Dec 13 03:15:07 crc kubenswrapper[5070]: I1213 03:15:07.684884 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-pszz7"] Dec 13 03:15:07 crc kubenswrapper[5070]: W1213 03:15:07.692670 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode6b959a6_f715_46c4_9192_c9c372246129.slice/crio-8e583c0227110cd8d8d3986a88587b98a9c8e0704abc6d6b84795b63a8289bde WatchSource:0}: Error finding container 8e583c0227110cd8d8d3986a88587b98a9c8e0704abc6d6b84795b63a8289bde: Status 404 returned error can't find the container with id 8e583c0227110cd8d8d3986a88587b98a9c8e0704abc6d6b84795b63a8289bde Dec 13 03:15:08 crc kubenswrapper[5070]: I1213 03:15:08.100514 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-h5qcs" Dec 13 03:15:08 crc kubenswrapper[5070]: I1213 03:15:08.109554 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-vblq5" Dec 13 03:15:08 crc kubenswrapper[5070]: I1213 03:15:08.136616 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-q5mgn" Dec 13 03:15:08 crc kubenswrapper[5070]: I1213 03:15:08.256674 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/383cb754-9d42-43dd-9cb7-5238fec04ce5-catalog-content\") pod \"383cb754-9d42-43dd-9cb7-5238fec04ce5\" (UID: \"383cb754-9d42-43dd-9cb7-5238fec04ce5\") " Dec 13 03:15:08 crc kubenswrapper[5070]: I1213 03:15:08.256736 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d84fad97-769a-4f5d-8e19-d91d308675f6-marketplace-trusted-ca\") pod \"d84fad97-769a-4f5d-8e19-d91d308675f6\" (UID: \"d84fad97-769a-4f5d-8e19-d91d308675f6\") " Dec 13 03:15:08 crc kubenswrapper[5070]: I1213 03:15:08.256795 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q4tlq\" (UniqueName: \"kubernetes.io/projected/a88af4a7-bdb6-4fca-b7fb-c6fe7f85bc4a-kube-api-access-q4tlq\") pod \"a88af4a7-bdb6-4fca-b7fb-c6fe7f85bc4a\" (UID: \"a88af4a7-bdb6-4fca-b7fb-c6fe7f85bc4a\") " Dec 13 03:15:08 crc kubenswrapper[5070]: I1213 03:15:08.256810 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hzx8v\" (UniqueName: \"kubernetes.io/projected/383cb754-9d42-43dd-9cb7-5238fec04ce5-kube-api-access-hzx8v\") pod \"383cb754-9d42-43dd-9cb7-5238fec04ce5\" (UID: \"383cb754-9d42-43dd-9cb7-5238fec04ce5\") " Dec 13 03:15:08 crc kubenswrapper[5070]: I1213 03:15:08.256828 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k79b6\" (UniqueName: \"kubernetes.io/projected/d84fad97-769a-4f5d-8e19-d91d308675f6-kube-api-access-k79b6\") pod 
\"d84fad97-769a-4f5d-8e19-d91d308675f6\" (UID: \"d84fad97-769a-4f5d-8e19-d91d308675f6\") " Dec 13 03:15:08 crc kubenswrapper[5070]: I1213 03:15:08.256886 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a88af4a7-bdb6-4fca-b7fb-c6fe7f85bc4a-catalog-content\") pod \"a88af4a7-bdb6-4fca-b7fb-c6fe7f85bc4a\" (UID: \"a88af4a7-bdb6-4fca-b7fb-c6fe7f85bc4a\") " Dec 13 03:15:08 crc kubenswrapper[5070]: I1213 03:15:08.256903 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/d84fad97-769a-4f5d-8e19-d91d308675f6-marketplace-operator-metrics\") pod \"d84fad97-769a-4f5d-8e19-d91d308675f6\" (UID: \"d84fad97-769a-4f5d-8e19-d91d308675f6\") " Dec 13 03:15:08 crc kubenswrapper[5070]: I1213 03:15:08.256940 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a88af4a7-bdb6-4fca-b7fb-c6fe7f85bc4a-utilities\") pod \"a88af4a7-bdb6-4fca-b7fb-c6fe7f85bc4a\" (UID: \"a88af4a7-bdb6-4fca-b7fb-c6fe7f85bc4a\") " Dec 13 03:15:08 crc kubenswrapper[5070]: I1213 03:15:08.256958 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/383cb754-9d42-43dd-9cb7-5238fec04ce5-utilities\") pod \"383cb754-9d42-43dd-9cb7-5238fec04ce5\" (UID: \"383cb754-9d42-43dd-9cb7-5238fec04ce5\") " Dec 13 03:15:08 crc kubenswrapper[5070]: I1213 03:15:08.258400 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d84fad97-769a-4f5d-8e19-d91d308675f6-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "d84fad97-769a-4f5d-8e19-d91d308675f6" (UID: "d84fad97-769a-4f5d-8e19-d91d308675f6"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:15:08 crc kubenswrapper[5070]: I1213 03:15:08.259141 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a88af4a7-bdb6-4fca-b7fb-c6fe7f85bc4a-utilities" (OuterVolumeSpecName: "utilities") pod "a88af4a7-bdb6-4fca-b7fb-c6fe7f85bc4a" (UID: "a88af4a7-bdb6-4fca-b7fb-c6fe7f85bc4a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 03:15:08 crc kubenswrapper[5070]: I1213 03:15:08.260312 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/383cb754-9d42-43dd-9cb7-5238fec04ce5-utilities" (OuterVolumeSpecName: "utilities") pod "383cb754-9d42-43dd-9cb7-5238fec04ce5" (UID: "383cb754-9d42-43dd-9cb7-5238fec04ce5"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 03:15:08 crc kubenswrapper[5070]: I1213 03:15:08.266526 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d84fad97-769a-4f5d-8e19-d91d308675f6-kube-api-access-k79b6" (OuterVolumeSpecName: "kube-api-access-k79b6") pod "d84fad97-769a-4f5d-8e19-d91d308675f6" (UID: "d84fad97-769a-4f5d-8e19-d91d308675f6"). InnerVolumeSpecName "kube-api-access-k79b6". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:15:08 crc kubenswrapper[5070]: I1213 03:15:08.270316 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/383cb754-9d42-43dd-9cb7-5238fec04ce5-kube-api-access-hzx8v" (OuterVolumeSpecName: "kube-api-access-hzx8v") pod "383cb754-9d42-43dd-9cb7-5238fec04ce5" (UID: "383cb754-9d42-43dd-9cb7-5238fec04ce5"). InnerVolumeSpecName "kube-api-access-hzx8v". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:15:08 crc kubenswrapper[5070]: I1213 03:15:08.274114 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d84fad97-769a-4f5d-8e19-d91d308675f6-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "d84fad97-769a-4f5d-8e19-d91d308675f6" (UID: "d84fad97-769a-4f5d-8e19-d91d308675f6"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:15:08 crc kubenswrapper[5070]: I1213 03:15:08.281754 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a88af4a7-bdb6-4fca-b7fb-c6fe7f85bc4a-kube-api-access-q4tlq" (OuterVolumeSpecName: "kube-api-access-q4tlq") pod "a88af4a7-bdb6-4fca-b7fb-c6fe7f85bc4a" (UID: "a88af4a7-bdb6-4fca-b7fb-c6fe7f85bc4a"). InnerVolumeSpecName "kube-api-access-q4tlq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:15:08 crc kubenswrapper[5070]: I1213 03:15:08.287120 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a88af4a7-bdb6-4fca-b7fb-c6fe7f85bc4a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a88af4a7-bdb6-4fca-b7fb-c6fe7f85bc4a" (UID: "a88af4a7-bdb6-4fca-b7fb-c6fe7f85bc4a"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 03:15:08 crc kubenswrapper[5070]: I1213 03:15:08.358272 5070 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a88af4a7-bdb6-4fca-b7fb-c6fe7f85bc4a-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 13 03:15:08 crc kubenswrapper[5070]: I1213 03:15:08.358305 5070 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/d84fad97-769a-4f5d-8e19-d91d308675f6-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Dec 13 03:15:08 crc kubenswrapper[5070]: I1213 03:15:08.358318 5070 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a88af4a7-bdb6-4fca-b7fb-c6fe7f85bc4a-utilities\") on node \"crc\" DevicePath \"\"" Dec 13 03:15:08 crc kubenswrapper[5070]: I1213 03:15:08.358326 5070 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/383cb754-9d42-43dd-9cb7-5238fec04ce5-utilities\") on node \"crc\" DevicePath \"\"" Dec 13 03:15:08 crc kubenswrapper[5070]: I1213 03:15:08.358334 5070 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d84fad97-769a-4f5d-8e19-d91d308675f6-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 13 03:15:08 crc kubenswrapper[5070]: I1213 03:15:08.358343 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q4tlq\" (UniqueName: \"kubernetes.io/projected/a88af4a7-bdb6-4fca-b7fb-c6fe7f85bc4a-kube-api-access-q4tlq\") on node \"crc\" DevicePath \"\"" Dec 13 03:15:08 crc kubenswrapper[5070]: I1213 03:15:08.358352 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hzx8v\" (UniqueName: \"kubernetes.io/projected/383cb754-9d42-43dd-9cb7-5238fec04ce5-kube-api-access-hzx8v\") on node \"crc\" DevicePath \"\"" Dec 13 03:15:08 crc kubenswrapper[5070]: I1213 03:15:08.358360 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k79b6\" (UniqueName: \"kubernetes.io/projected/d84fad97-769a-4f5d-8e19-d91d308675f6-kube-api-access-k79b6\") on node \"crc\" DevicePath \"\"" Dec 13 03:15:08 crc kubenswrapper[5070]: I1213 03:15:08.375883 5070 generic.go:334] "Generic (PLEG): container finished" podID="d84fad97-769a-4f5d-8e19-d91d308675f6" containerID="ffa26983f13e36b661fade44743a4771d311e4f13e25d9ad6e235d19ded6eb15" exitCode=0 Dec 13 03:15:08 crc kubenswrapper[5070]: I1213 03:15:08.375954 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-vblq5" event={"ID":"d84fad97-769a-4f5d-8e19-d91d308675f6","Type":"ContainerDied","Data":"ffa26983f13e36b661fade44743a4771d311e4f13e25d9ad6e235d19ded6eb15"} Dec 13 03:15:08 crc kubenswrapper[5070]: I1213 03:15:08.376038 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-vblq5" event={"ID":"d84fad97-769a-4f5d-8e19-d91d308675f6","Type":"ContainerDied","Data":"5c04893236b6f556e708ee8202dade3fe2b4d733c8358902ed075733d11f790d"} Dec 13 03:15:08 crc kubenswrapper[5070]: I1213 03:15:08.375998 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-vblq5" Dec 13 03:15:08 crc kubenswrapper[5070]: I1213 03:15:08.376055 5070 scope.go:117] "RemoveContainer" containerID="ffa26983f13e36b661fade44743a4771d311e4f13e25d9ad6e235d19ded6eb15" Dec 13 03:15:08 crc kubenswrapper[5070]: I1213 03:15:08.382287 5070 generic.go:334] "Generic (PLEG): container finished" podID="9e4db140-cd43-41ac-a1d7-1913a66ba814" containerID="70505c61ef9186ee3955480b750c15bde15e44a281eaaebf9afe00fbc281bf84" exitCode=0 Dec 13 03:15:08 crc kubenswrapper[5070]: I1213 03:15:08.382334 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rx625" event={"ID":"9e4db140-cd43-41ac-a1d7-1913a66ba814","Type":"ContainerDied","Data":"70505c61ef9186ee3955480b750c15bde15e44a281eaaebf9afe00fbc281bf84"} Dec 13 03:15:08 crc kubenswrapper[5070]: I1213 03:15:08.385892 5070 generic.go:334] "Generic (PLEG): container finished" podID="b3c6dca7-36c0-4fbc-90b0-a4f91bd588f3" containerID="e3f7a276cafef522541a276a57264707493cd7afd7d3d8ffda3193c324cb8ebc" exitCode=0 Dec 13 03:15:08 crc kubenswrapper[5070]: I1213 03:15:08.385944 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-j9rhx" event={"ID":"b3c6dca7-36c0-4fbc-90b0-a4f91bd588f3","Type":"ContainerDied","Data":"e3f7a276cafef522541a276a57264707493cd7afd7d3d8ffda3193c324cb8ebc"} Dec 13 03:15:08 crc kubenswrapper[5070]: I1213 03:15:08.386618 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/383cb754-9d42-43dd-9cb7-5238fec04ce5-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "383cb754-9d42-43dd-9cb7-5238fec04ce5" (UID: "383cb754-9d42-43dd-9cb7-5238fec04ce5"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 03:15:08 crc kubenswrapper[5070]: I1213 03:15:08.387252 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-pszz7" event={"ID":"e6b959a6-f715-46c4-9192-c9c372246129","Type":"ContainerStarted","Data":"c394d8af16b04c3892d3eadec7b0d8593399da2dc7399c4fc0e1b9c59df375a0"} Dec 13 03:15:08 crc kubenswrapper[5070]: I1213 03:15:08.387275 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-pszz7" event={"ID":"e6b959a6-f715-46c4-9192-c9c372246129","Type":"ContainerStarted","Data":"8e583c0227110cd8d8d3986a88587b98a9c8e0704abc6d6b84795b63a8289bde"} Dec 13 03:15:08 crc kubenswrapper[5070]: I1213 03:15:08.388245 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-pszz7" Dec 13 03:15:08 crc kubenswrapper[5070]: I1213 03:15:08.388423 5070 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-pszz7 container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.58:8080/healthz\": dial tcp 10.217.0.58:8080: connect: connection refused" start-of-body= Dec 13 03:15:08 crc kubenswrapper[5070]: I1213 03:15:08.388872 5070 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-pszz7" podUID="e6b959a6-f715-46c4-9192-c9c372246129" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.58:8080/healthz\": dial tcp 10.217.0.58:8080: connect: connection refused" Dec 13 03:15:08 crc kubenswrapper[5070]: I1213 03:15:08.389998 5070 generic.go:334] "Generic (PLEG): container finished" podID="383cb754-9d42-43dd-9cb7-5238fec04ce5" containerID="1717ee3a3fc150d71953d6f734511d584df1e81d94146f3e66a682c2a16fb0f4" exitCode=0 Dec 13 03:15:08 crc kubenswrapper[5070]: I1213 03:15:08.390190 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-q5mgn" event={"ID":"383cb754-9d42-43dd-9cb7-5238fec04ce5","Type":"ContainerDied","Data":"1717ee3a3fc150d71953d6f734511d584df1e81d94146f3e66a682c2a16fb0f4"} Dec 13 03:15:08 crc kubenswrapper[5070]: I1213 03:15:08.390219 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-q5mgn" event={"ID":"383cb754-9d42-43dd-9cb7-5238fec04ce5","Type":"ContainerDied","Data":"f2d7938b95023d5d136f305cf6ba89da4a8d1b17da306d121ba950e6140e05ba"} Dec 13 03:15:08 crc kubenswrapper[5070]: I1213 03:15:08.390277 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-q5mgn" Dec 13 03:15:08 crc kubenswrapper[5070]: I1213 03:15:08.403179 5070 generic.go:334] "Generic (PLEG): container finished" podID="2782a41c-ce36-4c0d-89c1-27b5e12e9b00" containerID="8cfe61d6e66a78b04addb35438357158f34a2d1fc5f3803f7d398cc303ed9906" exitCode=0 Dec 13 03:15:08 crc kubenswrapper[5070]: I1213 03:15:08.403271 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lfs78" event={"ID":"2782a41c-ce36-4c0d-89c1-27b5e12e9b00","Type":"ContainerDied","Data":"8cfe61d6e66a78b04addb35438357158f34a2d1fc5f3803f7d398cc303ed9906"} Dec 13 03:15:08 crc kubenswrapper[5070]: I1213 03:15:08.404393 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-pszz7" podStartSLOduration=1.404368126 podStartE2EDuration="1.404368126s" podCreationTimestamp="2025-12-13 03:15:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 03:15:08.401362691 +0000 UTC m=+200.637206237" watchObservedRunningTime="2025-12-13 03:15:08.404368126 +0000 UTC m=+200.640211692" Dec 13 03:15:08 crc kubenswrapper[5070]: I1213 03:15:08.414736 5070 scope.go:117] "RemoveContainer" containerID="ffa26983f13e36b661fade44743a4771d311e4f13e25d9ad6e235d19ded6eb15" Dec 13 03:15:08 crc kubenswrapper[5070]: E1213 03:15:08.416010 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ffa26983f13e36b661fade44743a4771d311e4f13e25d9ad6e235d19ded6eb15\": container with ID starting with ffa26983f13e36b661fade44743a4771d311e4f13e25d9ad6e235d19ded6eb15 not found: ID does not exist" containerID="ffa26983f13e36b661fade44743a4771d311e4f13e25d9ad6e235d19ded6eb15" Dec 13 03:15:08 crc kubenswrapper[5070]: I1213 03:15:08.416044 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ffa26983f13e36b661fade44743a4771d311e4f13e25d9ad6e235d19ded6eb15"} err="failed to get container status \"ffa26983f13e36b661fade44743a4771d311e4f13e25d9ad6e235d19ded6eb15\": rpc error: code = NotFound desc = could not find container \"ffa26983f13e36b661fade44743a4771d311e4f13e25d9ad6e235d19ded6eb15\": container with ID starting with ffa26983f13e36b661fade44743a4771d311e4f13e25d9ad6e235d19ded6eb15 not found: ID does not exist" Dec 13 03:15:08 crc kubenswrapper[5070]: I1213 03:15:08.416080 5070 scope.go:117] "RemoveContainer" containerID="1717ee3a3fc150d71953d6f734511d584df1e81d94146f3e66a682c2a16fb0f4" Dec 13 03:15:08 crc kubenswrapper[5070]: I1213 03:15:08.419747 5070 generic.go:334] "Generic (PLEG): container finished" podID="a88af4a7-bdb6-4fca-b7fb-c6fe7f85bc4a" containerID="925341df345ffead9caedd9d0bfb54d060debde6b0770aae06c516f364e657bc" exitCode=0 Dec 13 03:15:08 crc kubenswrapper[5070]: I1213 03:15:08.419816 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-h5qcs" event={"ID":"a88af4a7-bdb6-4fca-b7fb-c6fe7f85bc4a","Type":"ContainerDied","Data":"925341df345ffead9caedd9d0bfb54d060debde6b0770aae06c516f364e657bc"} Dec 13 03:15:08 crc kubenswrapper[5070]: I1213 03:15:08.419850 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-h5qcs" event={"ID":"a88af4a7-bdb6-4fca-b7fb-c6fe7f85bc4a","Type":"ContainerDied","Data":"012d9feee5c506951274f8ed0abda406596ce4f6a63d5f11f4549cc587827945"} 
Dec 13 03:15:08 crc kubenswrapper[5070]: I1213 03:15:08.427851 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-h5qcs" Dec 13 03:15:08 crc kubenswrapper[5070]: I1213 03:15:08.428768 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-vblq5"] Dec 13 03:15:08 crc kubenswrapper[5070]: I1213 03:15:08.434793 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-vblq5"] Dec 13 03:15:08 crc kubenswrapper[5070]: I1213 03:15:08.452377 5070 scope.go:117] "RemoveContainer" containerID="14071c5c7ef311133974965938aac0624c46cffb364f3a5c8c0ce0bd56962461" Dec 13 03:15:08 crc kubenswrapper[5070]: I1213 03:15:08.457997 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-q5mgn"] Dec 13 03:15:08 crc kubenswrapper[5070]: I1213 03:15:08.459604 5070 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/383cb754-9d42-43dd-9cb7-5238fec04ce5-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 13 03:15:08 crc kubenswrapper[5070]: I1213 03:15:08.471232 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-q5mgn"] Dec 13 03:15:08 crc kubenswrapper[5070]: I1213 03:15:08.479064 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-h5qcs"] Dec 13 03:15:08 crc kubenswrapper[5070]: I1213 03:15:08.479343 5070 scope.go:117] "RemoveContainer" containerID="6610b7acee3f3314defad3e5cb083b475ab6f6ef31129c80a3e3aca52d5528ce" Dec 13 03:15:08 crc kubenswrapper[5070]: I1213 03:15:08.482140 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-h5qcs"] Dec 13 03:15:08 crc kubenswrapper[5070]: I1213 03:15:08.500843 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-j9rhx" Dec 13 03:15:08 crc kubenswrapper[5070]: I1213 03:15:08.512933 5070 scope.go:117] "RemoveContainer" containerID="1717ee3a3fc150d71953d6f734511d584df1e81d94146f3e66a682c2a16fb0f4" Dec 13 03:15:08 crc kubenswrapper[5070]: E1213 03:15:08.513299 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1717ee3a3fc150d71953d6f734511d584df1e81d94146f3e66a682c2a16fb0f4\": container with ID starting with 1717ee3a3fc150d71953d6f734511d584df1e81d94146f3e66a682c2a16fb0f4 not found: ID does not exist" containerID="1717ee3a3fc150d71953d6f734511d584df1e81d94146f3e66a682c2a16fb0f4" Dec 13 03:15:08 crc kubenswrapper[5070]: I1213 03:15:08.513354 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1717ee3a3fc150d71953d6f734511d584df1e81d94146f3e66a682c2a16fb0f4"} err="failed to get container status \"1717ee3a3fc150d71953d6f734511d584df1e81d94146f3e66a682c2a16fb0f4\": rpc error: code = NotFound desc = could not find container \"1717ee3a3fc150d71953d6f734511d584df1e81d94146f3e66a682c2a16fb0f4\": container with ID starting with 1717ee3a3fc150d71953d6f734511d584df1e81d94146f3e66a682c2a16fb0f4 not found: ID does not exist" Dec 13 03:15:08 crc kubenswrapper[5070]: I1213 03:15:08.513392 5070 scope.go:117] "RemoveContainer" containerID="14071c5c7ef311133974965938aac0624c46cffb364f3a5c8c0ce0bd56962461" Dec 13 03:15:08 crc kubenswrapper[5070]: E1213 03:15:08.513653 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"14071c5c7ef311133974965938aac0624c46cffb364f3a5c8c0ce0bd56962461\": container with ID starting with 14071c5c7ef311133974965938aac0624c46cffb364f3a5c8c0ce0bd56962461 not found: ID does not exist" containerID="14071c5c7ef311133974965938aac0624c46cffb364f3a5c8c0ce0bd56962461" Dec 13 03:15:08 crc kubenswrapper[5070]: I1213 03:15:08.513693 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"14071c5c7ef311133974965938aac0624c46cffb364f3a5c8c0ce0bd56962461"} err="failed to get container status \"14071c5c7ef311133974965938aac0624c46cffb364f3a5c8c0ce0bd56962461\": rpc error: code = NotFound desc = could not find container \"14071c5c7ef311133974965938aac0624c46cffb364f3a5c8c0ce0bd56962461\": container with ID starting with 14071c5c7ef311133974965938aac0624c46cffb364f3a5c8c0ce0bd56962461 not found: ID does not exist" Dec 13 03:15:08 crc kubenswrapper[5070]: I1213 03:15:08.513712 5070 scope.go:117] "RemoveContainer" containerID="6610b7acee3f3314defad3e5cb083b475ab6f6ef31129c80a3e3aca52d5528ce" Dec 13 03:15:08 crc kubenswrapper[5070]: E1213 03:15:08.513963 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6610b7acee3f3314defad3e5cb083b475ab6f6ef31129c80a3e3aca52d5528ce\": container with ID starting with 6610b7acee3f3314defad3e5cb083b475ab6f6ef31129c80a3e3aca52d5528ce not found: ID does not exist" containerID="6610b7acee3f3314defad3e5cb083b475ab6f6ef31129c80a3e3aca52d5528ce" Dec 13 03:15:08 crc kubenswrapper[5070]: I1213 03:15:08.513987 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6610b7acee3f3314defad3e5cb083b475ab6f6ef31129c80a3e3aca52d5528ce"} err="failed to get container status \"6610b7acee3f3314defad3e5cb083b475ab6f6ef31129c80a3e3aca52d5528ce\": rpc error: code = 
NotFound desc = could not find container \"6610b7acee3f3314defad3e5cb083b475ab6f6ef31129c80a3e3aca52d5528ce\": container with ID starting with 6610b7acee3f3314defad3e5cb083b475ab6f6ef31129c80a3e3aca52d5528ce not found: ID does not exist" Dec 13 03:15:08 crc kubenswrapper[5070]: I1213 03:15:08.514006 5070 scope.go:117] "RemoveContainer" containerID="925341df345ffead9caedd9d0bfb54d060debde6b0770aae06c516f364e657bc" Dec 13 03:15:08 crc kubenswrapper[5070]: I1213 03:15:08.538220 5070 scope.go:117] "RemoveContainer" containerID="8e500363a23befb5fafc4d82fc0e2b06f99d85adee5b13f24244af4b208b5293" Dec 13 03:15:08 crc kubenswrapper[5070]: I1213 03:15:08.546587 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-rx625" Dec 13 03:15:08 crc kubenswrapper[5070]: I1213 03:15:08.547598 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-lfs78" Dec 13 03:15:08 crc kubenswrapper[5070]: I1213 03:15:08.563717 5070 scope.go:117] "RemoveContainer" containerID="4cfbd5ef9e6ffe36d857f9180ce8d76defdd8df1f2a7c58603690717df84052a" Dec 13 03:15:08 crc kubenswrapper[5070]: I1213 03:15:08.599396 5070 scope.go:117] "RemoveContainer" containerID="925341df345ffead9caedd9d0bfb54d060debde6b0770aae06c516f364e657bc" Dec 13 03:15:08 crc kubenswrapper[5070]: E1213 03:15:08.599914 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"925341df345ffead9caedd9d0bfb54d060debde6b0770aae06c516f364e657bc\": container with ID starting with 925341df345ffead9caedd9d0bfb54d060debde6b0770aae06c516f364e657bc not found: ID does not exist" containerID="925341df345ffead9caedd9d0bfb54d060debde6b0770aae06c516f364e657bc" Dec 13 03:15:08 crc kubenswrapper[5070]: I1213 03:15:08.599945 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"925341df345ffead9caedd9d0bfb54d060debde6b0770aae06c516f364e657bc"} err="failed to get container status \"925341df345ffead9caedd9d0bfb54d060debde6b0770aae06c516f364e657bc\": rpc error: code = NotFound desc = could not find container \"925341df345ffead9caedd9d0bfb54d060debde6b0770aae06c516f364e657bc\": container with ID starting with 925341df345ffead9caedd9d0bfb54d060debde6b0770aae06c516f364e657bc not found: ID does not exist" Dec 13 03:15:08 crc kubenswrapper[5070]: I1213 03:15:08.599978 5070 scope.go:117] "RemoveContainer" containerID="8e500363a23befb5fafc4d82fc0e2b06f99d85adee5b13f24244af4b208b5293" Dec 13 03:15:08 crc kubenswrapper[5070]: E1213 03:15:08.600390 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8e500363a23befb5fafc4d82fc0e2b06f99d85adee5b13f24244af4b208b5293\": container with ID starting with 8e500363a23befb5fafc4d82fc0e2b06f99d85adee5b13f24244af4b208b5293 not found: ID does not exist" containerID="8e500363a23befb5fafc4d82fc0e2b06f99d85adee5b13f24244af4b208b5293" Dec 13 03:15:08 crc kubenswrapper[5070]: I1213 03:15:08.600407 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8e500363a23befb5fafc4d82fc0e2b06f99d85adee5b13f24244af4b208b5293"} err="failed to get container status \"8e500363a23befb5fafc4d82fc0e2b06f99d85adee5b13f24244af4b208b5293\": rpc error: code = NotFound desc = could not find container \"8e500363a23befb5fafc4d82fc0e2b06f99d85adee5b13f24244af4b208b5293\": container with ID starting with 
8e500363a23befb5fafc4d82fc0e2b06f99d85adee5b13f24244af4b208b5293 not found: ID does not exist" Dec 13 03:15:08 crc kubenswrapper[5070]: I1213 03:15:08.600421 5070 scope.go:117] "RemoveContainer" containerID="4cfbd5ef9e6ffe36d857f9180ce8d76defdd8df1f2a7c58603690717df84052a" Dec 13 03:15:08 crc kubenswrapper[5070]: E1213 03:15:08.600667 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4cfbd5ef9e6ffe36d857f9180ce8d76defdd8df1f2a7c58603690717df84052a\": container with ID starting with 4cfbd5ef9e6ffe36d857f9180ce8d76defdd8df1f2a7c58603690717df84052a not found: ID does not exist" containerID="4cfbd5ef9e6ffe36d857f9180ce8d76defdd8df1f2a7c58603690717df84052a" Dec 13 03:15:08 crc kubenswrapper[5070]: I1213 03:15:08.600694 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4cfbd5ef9e6ffe36d857f9180ce8d76defdd8df1f2a7c58603690717df84052a"} err="failed to get container status \"4cfbd5ef9e6ffe36d857f9180ce8d76defdd8df1f2a7c58603690717df84052a\": rpc error: code = NotFound desc = could not find container \"4cfbd5ef9e6ffe36d857f9180ce8d76defdd8df1f2a7c58603690717df84052a\": container with ID starting with 4cfbd5ef9e6ffe36d857f9180ce8d76defdd8df1f2a7c58603690717df84052a not found: ID does not exist" Dec 13 03:15:08 crc kubenswrapper[5070]: I1213 03:15:08.661384 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vbtvm\" (UniqueName: \"kubernetes.io/projected/b3c6dca7-36c0-4fbc-90b0-a4f91bd588f3-kube-api-access-vbtvm\") pod \"b3c6dca7-36c0-4fbc-90b0-a4f91bd588f3\" (UID: \"b3c6dca7-36c0-4fbc-90b0-a4f91bd588f3\") " Dec 13 03:15:08 crc kubenswrapper[5070]: I1213 03:15:08.661595 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b3c6dca7-36c0-4fbc-90b0-a4f91bd588f3-utilities\") pod \"b3c6dca7-36c0-4fbc-90b0-a4f91bd588f3\" (UID: \"b3c6dca7-36c0-4fbc-90b0-a4f91bd588f3\") " Dec 13 03:15:08 crc kubenswrapper[5070]: I1213 03:15:08.661656 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9e4db140-cd43-41ac-a1d7-1913a66ba814-catalog-content\") pod \"9e4db140-cd43-41ac-a1d7-1913a66ba814\" (UID: \"9e4db140-cd43-41ac-a1d7-1913a66ba814\") " Dec 13 03:15:08 crc kubenswrapper[5070]: I1213 03:15:08.661679 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2782a41c-ce36-4c0d-89c1-27b5e12e9b00-catalog-content\") pod \"2782a41c-ce36-4c0d-89c1-27b5e12e9b00\" (UID: \"2782a41c-ce36-4c0d-89c1-27b5e12e9b00\") " Dec 13 03:15:08 crc kubenswrapper[5070]: I1213 03:15:08.661748 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4kn69\" (UniqueName: \"kubernetes.io/projected/9e4db140-cd43-41ac-a1d7-1913a66ba814-kube-api-access-4kn69\") pod \"9e4db140-cd43-41ac-a1d7-1913a66ba814\" (UID: \"9e4db140-cd43-41ac-a1d7-1913a66ba814\") " Dec 13 03:15:08 crc kubenswrapper[5070]: I1213 03:15:08.661807 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b3c6dca7-36c0-4fbc-90b0-a4f91bd588f3-catalog-content\") pod \"b3c6dca7-36c0-4fbc-90b0-a4f91bd588f3\" (UID: \"b3c6dca7-36c0-4fbc-90b0-a4f91bd588f3\") " Dec 13 03:15:08 crc kubenswrapper[5070]: I1213 03:15:08.661856 5070 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6csrd\" (UniqueName: \"kubernetes.io/projected/2782a41c-ce36-4c0d-89c1-27b5e12e9b00-kube-api-access-6csrd\") pod \"2782a41c-ce36-4c0d-89c1-27b5e12e9b00\" (UID: \"2782a41c-ce36-4c0d-89c1-27b5e12e9b00\") " Dec 13 03:15:08 crc kubenswrapper[5070]: I1213 03:15:08.661906 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9e4db140-cd43-41ac-a1d7-1913a66ba814-utilities\") pod \"9e4db140-cd43-41ac-a1d7-1913a66ba814\" (UID: \"9e4db140-cd43-41ac-a1d7-1913a66ba814\") " Dec 13 03:15:08 crc kubenswrapper[5070]: I1213 03:15:08.661982 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2782a41c-ce36-4c0d-89c1-27b5e12e9b00-utilities\") pod \"2782a41c-ce36-4c0d-89c1-27b5e12e9b00\" (UID: \"2782a41c-ce36-4c0d-89c1-27b5e12e9b00\") " Dec 13 03:15:08 crc kubenswrapper[5070]: I1213 03:15:08.663050 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b3c6dca7-36c0-4fbc-90b0-a4f91bd588f3-utilities" (OuterVolumeSpecName: "utilities") pod "b3c6dca7-36c0-4fbc-90b0-a4f91bd588f3" (UID: "b3c6dca7-36c0-4fbc-90b0-a4f91bd588f3"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 03:15:08 crc kubenswrapper[5070]: I1213 03:15:08.663320 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2782a41c-ce36-4c0d-89c1-27b5e12e9b00-utilities" (OuterVolumeSpecName: "utilities") pod "2782a41c-ce36-4c0d-89c1-27b5e12e9b00" (UID: "2782a41c-ce36-4c0d-89c1-27b5e12e9b00"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 03:15:08 crc kubenswrapper[5070]: I1213 03:15:08.665793 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b3c6dca7-36c0-4fbc-90b0-a4f91bd588f3-kube-api-access-vbtvm" (OuterVolumeSpecName: "kube-api-access-vbtvm") pod "b3c6dca7-36c0-4fbc-90b0-a4f91bd588f3" (UID: "b3c6dca7-36c0-4fbc-90b0-a4f91bd588f3"). InnerVolumeSpecName "kube-api-access-vbtvm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:15:08 crc kubenswrapper[5070]: I1213 03:15:08.665854 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9e4db140-cd43-41ac-a1d7-1913a66ba814-utilities" (OuterVolumeSpecName: "utilities") pod "9e4db140-cd43-41ac-a1d7-1913a66ba814" (UID: "9e4db140-cd43-41ac-a1d7-1913a66ba814"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 03:15:08 crc kubenswrapper[5070]: I1213 03:15:08.666681 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9e4db140-cd43-41ac-a1d7-1913a66ba814-kube-api-access-4kn69" (OuterVolumeSpecName: "kube-api-access-4kn69") pod "9e4db140-cd43-41ac-a1d7-1913a66ba814" (UID: "9e4db140-cd43-41ac-a1d7-1913a66ba814"). InnerVolumeSpecName "kube-api-access-4kn69". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:15:08 crc kubenswrapper[5070]: I1213 03:15:08.667014 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2782a41c-ce36-4c0d-89c1-27b5e12e9b00-kube-api-access-6csrd" (OuterVolumeSpecName: "kube-api-access-6csrd") pod "2782a41c-ce36-4c0d-89c1-27b5e12e9b00" (UID: "2782a41c-ce36-4c0d-89c1-27b5e12e9b00"). InnerVolumeSpecName "kube-api-access-6csrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:15:08 crc kubenswrapper[5070]: I1213 03:15:08.721592 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b3c6dca7-36c0-4fbc-90b0-a4f91bd588f3-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b3c6dca7-36c0-4fbc-90b0-a4f91bd588f3" (UID: "b3c6dca7-36c0-4fbc-90b0-a4f91bd588f3"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 03:15:08 crc kubenswrapper[5070]: I1213 03:15:08.722809 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2782a41c-ce36-4c0d-89c1-27b5e12e9b00-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2782a41c-ce36-4c0d-89c1-27b5e12e9b00" (UID: "2782a41c-ce36-4c0d-89c1-27b5e12e9b00"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 03:15:08 crc kubenswrapper[5070]: I1213 03:15:08.728045 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9e4db140-cd43-41ac-a1d7-1913a66ba814-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9e4db140-cd43-41ac-a1d7-1913a66ba814" (UID: "9e4db140-cd43-41ac-a1d7-1913a66ba814"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 03:15:08 crc kubenswrapper[5070]: I1213 03:15:08.764232 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vbtvm\" (UniqueName: \"kubernetes.io/projected/b3c6dca7-36c0-4fbc-90b0-a4f91bd588f3-kube-api-access-vbtvm\") on node \"crc\" DevicePath \"\"" Dec 13 03:15:08 crc kubenswrapper[5070]: I1213 03:15:08.764319 5070 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b3c6dca7-36c0-4fbc-90b0-a4f91bd588f3-utilities\") on node \"crc\" DevicePath \"\"" Dec 13 03:15:08 crc kubenswrapper[5070]: I1213 03:15:08.764337 5070 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9e4db140-cd43-41ac-a1d7-1913a66ba814-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 13 03:15:08 crc kubenswrapper[5070]: I1213 03:15:08.764351 5070 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2782a41c-ce36-4c0d-89c1-27b5e12e9b00-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 13 03:15:08 crc kubenswrapper[5070]: I1213 03:15:08.764365 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4kn69\" (UniqueName: \"kubernetes.io/projected/9e4db140-cd43-41ac-a1d7-1913a66ba814-kube-api-access-4kn69\") on node \"crc\" DevicePath \"\"" Dec 13 03:15:08 crc kubenswrapper[5070]: I1213 03:15:08.764378 5070 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b3c6dca7-36c0-4fbc-90b0-a4f91bd588f3-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 13 03:15:08 crc kubenswrapper[5070]: I1213 03:15:08.764390 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6csrd\" (UniqueName: \"kubernetes.io/projected/2782a41c-ce36-4c0d-89c1-27b5e12e9b00-kube-api-access-6csrd\") on node \"crc\" DevicePath \"\"" Dec 13 03:15:08 crc kubenswrapper[5070]: I1213 03:15:08.764430 5070 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9e4db140-cd43-41ac-a1d7-1913a66ba814-utilities\") on node \"crc\" DevicePath \"\"" Dec 13 03:15:08 crc kubenswrapper[5070]: I1213 03:15:08.764479 5070 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2782a41c-ce36-4c0d-89c1-27b5e12e9b00-utilities\") on node \"crc\" DevicePath \"\"" Dec 13 03:15:09 crc kubenswrapper[5070]: I1213 03:15:09.430503 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-j9rhx" event={"ID":"b3c6dca7-36c0-4fbc-90b0-a4f91bd588f3","Type":"ContainerDied","Data":"d4f471a17488ca77df509771673b3bc38f09dcb13b2e927d2ef3936441a55354"} Dec 13 03:15:09 crc kubenswrapper[5070]: I1213 03:15:09.431241 5070 scope.go:117] "RemoveContainer" containerID="e3f7a276cafef522541a276a57264707493cd7afd7d3d8ffda3193c324cb8ebc" Dec 13 03:15:09 crc kubenswrapper[5070]: I1213 03:15:09.430552 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-j9rhx" Dec 13 03:15:09 crc kubenswrapper[5070]: I1213 03:15:09.437312 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lfs78" event={"ID":"2782a41c-ce36-4c0d-89c1-27b5e12e9b00","Type":"ContainerDied","Data":"0b96368ad8167e8d78f1219c30e22e9471616706e154f0515c9158089933f583"} Dec 13 03:15:09 crc kubenswrapper[5070]: I1213 03:15:09.437359 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-lfs78" Dec 13 03:15:09 crc kubenswrapper[5070]: I1213 03:15:09.454619 5070 scope.go:117] "RemoveContainer" containerID="692f16c72ff955c4d9b1bf9a57be181a758db4674117015ea970bcb633556b8e" Dec 13 03:15:09 crc kubenswrapper[5070]: I1213 03:15:09.454828 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-rx625" Dec 13 03:15:09 crc kubenswrapper[5070]: I1213 03:15:09.454832 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rx625" event={"ID":"9e4db140-cd43-41ac-a1d7-1913a66ba814","Type":"ContainerDied","Data":"ae93555c1594239493e557b701b078462447b7572ca489a1fb9f03ca596a288e"} Dec 13 03:15:09 crc kubenswrapper[5070]: I1213 03:15:09.458330 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-pszz7" Dec 13 03:15:09 crc kubenswrapper[5070]: I1213 03:15:09.505358 5070 scope.go:117] "RemoveContainer" containerID="55dd37681d604a71243c4bfbaa0ff718af4790c1ea5c7c0132dedff80adba549" Dec 13 03:15:09 crc kubenswrapper[5070]: I1213 03:15:09.510221 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-j9rhx"] Dec 13 03:15:09 crc kubenswrapper[5070]: I1213 03:15:09.519408 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-j9rhx"] Dec 13 03:15:09 crc kubenswrapper[5070]: I1213 03:15:09.527124 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-lfs78"] Dec 13 03:15:09 crc kubenswrapper[5070]: I1213 03:15:09.536369 5070 scope.go:117] "RemoveContainer" containerID="8cfe61d6e66a78b04addb35438357158f34a2d1fc5f3803f7d398cc303ed9906" Dec 13 03:15:09 crc kubenswrapper[5070]: I1213 03:15:09.539280 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-lfs78"] Dec 13 03:15:09 crc kubenswrapper[5070]: I1213 03:15:09.544991 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-rx625"] Dec 13 03:15:09 crc kubenswrapper[5070]: I1213 03:15:09.545025 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-rx625"] Dec 13 03:15:09 crc kubenswrapper[5070]: I1213 03:15:09.554312 5070 scope.go:117] "RemoveContainer" containerID="fe97b6f2ed3d3fd757c8f94d54381670e85de662fcd539e9a0f1c26ad36b5668" Dec 13 03:15:09 crc kubenswrapper[5070]: I1213 03:15:09.566125 5070 scope.go:117] "RemoveContainer" containerID="53e21aaf24da75639aa694e5c2a14fd1d30ffd671d9bec0d7f119ef9f7ae26c9" Dec 13 03:15:09 crc kubenswrapper[5070]: I1213 03:15:09.579704 5070 scope.go:117] "RemoveContainer" containerID="70505c61ef9186ee3955480b750c15bde15e44a281eaaebf9afe00fbc281bf84" Dec 13 03:15:09 crc kubenswrapper[5070]: I1213 03:15:09.594376 5070 scope.go:117] "RemoveContainer" 
containerID="97215289fe425d5ac8c5b682aace07bc3e2391bbaf4be1dcedc77a8aa5eb51af" Dec 13 03:15:09 crc kubenswrapper[5070]: I1213 03:15:09.607011 5070 scope.go:117] "RemoveContainer" containerID="3d240eac20f24c2a6cf32bbfc86692f4294d999b1a49358ffc05f4f323b9186c" Dec 13 03:15:10 crc kubenswrapper[5070]: I1213 03:15:10.181375 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2782a41c-ce36-4c0d-89c1-27b5e12e9b00" path="/var/lib/kubelet/pods/2782a41c-ce36-4c0d-89c1-27b5e12e9b00/volumes" Dec 13 03:15:10 crc kubenswrapper[5070]: I1213 03:15:10.182999 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="383cb754-9d42-43dd-9cb7-5238fec04ce5" path="/var/lib/kubelet/pods/383cb754-9d42-43dd-9cb7-5238fec04ce5/volumes" Dec 13 03:15:10 crc kubenswrapper[5070]: I1213 03:15:10.184687 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9e4db140-cd43-41ac-a1d7-1913a66ba814" path="/var/lib/kubelet/pods/9e4db140-cd43-41ac-a1d7-1913a66ba814/volumes" Dec 13 03:15:10 crc kubenswrapper[5070]: I1213 03:15:10.187700 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a88af4a7-bdb6-4fca-b7fb-c6fe7f85bc4a" path="/var/lib/kubelet/pods/a88af4a7-bdb6-4fca-b7fb-c6fe7f85bc4a/volumes" Dec 13 03:15:10 crc kubenswrapper[5070]: I1213 03:15:10.189491 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b3c6dca7-36c0-4fbc-90b0-a4f91bd588f3" path="/var/lib/kubelet/pods/b3c6dca7-36c0-4fbc-90b0-a4f91bd588f3/volumes" Dec 13 03:15:10 crc kubenswrapper[5070]: I1213 03:15:10.192162 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d84fad97-769a-4f5d-8e19-d91d308675f6" path="/var/lib/kubelet/pods/d84fad97-769a-4f5d-8e19-d91d308675f6/volumes" Dec 13 03:15:21 crc kubenswrapper[5070]: I1213 03:15:21.467607 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-qk5h4"] Dec 13 03:15:21 crc kubenswrapper[5070]: I1213 03:15:21.469393 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-qk5h4" podUID="e26ec275-42a6-42af-a6ed-0872bd777acf" containerName="controller-manager" containerID="cri-o://5bf4aafb16a3734e016df6f6f0881707b3c56144b647ddf961cdd3f099d61dec" gracePeriod=30 Dec 13 03:15:21 crc kubenswrapper[5070]: I1213 03:15:21.587487 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-27s4z"] Dec 13 03:15:21 crc kubenswrapper[5070]: I1213 03:15:21.588331 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-27s4z" podUID="1ba4ef14-c563-4903-b359-f80b487d8ced" containerName="route-controller-manager" containerID="cri-o://12ee602fd25ff6548928b7d467399f3a82186c392108eaf09365d2d206aff2a0" gracePeriod=30 Dec 13 03:15:21 crc kubenswrapper[5070]: I1213 03:15:21.825358 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-qk5h4" Dec 13 03:15:21 crc kubenswrapper[5070]: I1213 03:15:21.929635 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-27s4z" Dec 13 03:15:21 crc kubenswrapper[5070]: I1213 03:15:21.940244 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/e26ec275-42a6-42af-a6ed-0872bd777acf-proxy-ca-bundles\") pod \"e26ec275-42a6-42af-a6ed-0872bd777acf\" (UID: \"e26ec275-42a6-42af-a6ed-0872bd777acf\") " Dec 13 03:15:21 crc kubenswrapper[5070]: I1213 03:15:21.940311 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e26ec275-42a6-42af-a6ed-0872bd777acf-client-ca\") pod \"e26ec275-42a6-42af-a6ed-0872bd777acf\" (UID: \"e26ec275-42a6-42af-a6ed-0872bd777acf\") " Dec 13 03:15:21 crc kubenswrapper[5070]: I1213 03:15:21.940373 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e26ec275-42a6-42af-a6ed-0872bd777acf-serving-cert\") pod \"e26ec275-42a6-42af-a6ed-0872bd777acf\" (UID: \"e26ec275-42a6-42af-a6ed-0872bd777acf\") " Dec 13 03:15:21 crc kubenswrapper[5070]: I1213 03:15:21.940399 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wx7n6\" (UniqueName: \"kubernetes.io/projected/e26ec275-42a6-42af-a6ed-0872bd777acf-kube-api-access-wx7n6\") pod \"e26ec275-42a6-42af-a6ed-0872bd777acf\" (UID: \"e26ec275-42a6-42af-a6ed-0872bd777acf\") " Dec 13 03:15:21 crc kubenswrapper[5070]: I1213 03:15:21.940481 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e26ec275-42a6-42af-a6ed-0872bd777acf-config\") pod \"e26ec275-42a6-42af-a6ed-0872bd777acf\" (UID: \"e26ec275-42a6-42af-a6ed-0872bd777acf\") " Dec 13 03:15:21 crc kubenswrapper[5070]: I1213 03:15:21.941672 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e26ec275-42a6-42af-a6ed-0872bd777acf-client-ca" (OuterVolumeSpecName: "client-ca") pod "e26ec275-42a6-42af-a6ed-0872bd777acf" (UID: "e26ec275-42a6-42af-a6ed-0872bd777acf"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:15:21 crc kubenswrapper[5070]: I1213 03:15:21.941770 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e26ec275-42a6-42af-a6ed-0872bd777acf-config" (OuterVolumeSpecName: "config") pod "e26ec275-42a6-42af-a6ed-0872bd777acf" (UID: "e26ec275-42a6-42af-a6ed-0872bd777acf"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:15:21 crc kubenswrapper[5070]: I1213 03:15:21.942472 5070 patch_prober.go:28] interesting pod/machine-config-daemon-9l4rb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 13 03:15:21 crc kubenswrapper[5070]: I1213 03:15:21.942527 5070 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 13 03:15:21 crc kubenswrapper[5070]: I1213 03:15:21.942605 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e26ec275-42a6-42af-a6ed-0872bd777acf-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "e26ec275-42a6-42af-a6ed-0872bd777acf" (UID: "e26ec275-42a6-42af-a6ed-0872bd777acf"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:15:21 crc kubenswrapper[5070]: I1213 03:15:21.947104 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e26ec275-42a6-42af-a6ed-0872bd777acf-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e26ec275-42a6-42af-a6ed-0872bd777acf" (UID: "e26ec275-42a6-42af-a6ed-0872bd777acf"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:15:21 crc kubenswrapper[5070]: I1213 03:15:21.947740 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e26ec275-42a6-42af-a6ed-0872bd777acf-kube-api-access-wx7n6" (OuterVolumeSpecName: "kube-api-access-wx7n6") pod "e26ec275-42a6-42af-a6ed-0872bd777acf" (UID: "e26ec275-42a6-42af-a6ed-0872bd777acf"). InnerVolumeSpecName "kube-api-access-wx7n6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:15:22 crc kubenswrapper[5070]: I1213 03:15:22.042113 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1ba4ef14-c563-4903-b359-f80b487d8ced-client-ca\") pod \"1ba4ef14-c563-4903-b359-f80b487d8ced\" (UID: \"1ba4ef14-c563-4903-b359-f80b487d8ced\") " Dec 13 03:15:22 crc kubenswrapper[5070]: I1213 03:15:22.042940 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m5h54\" (UniqueName: \"kubernetes.io/projected/1ba4ef14-c563-4903-b359-f80b487d8ced-kube-api-access-m5h54\") pod \"1ba4ef14-c563-4903-b359-f80b487d8ced\" (UID: \"1ba4ef14-c563-4903-b359-f80b487d8ced\") " Dec 13 03:15:22 crc kubenswrapper[5070]: I1213 03:15:22.043123 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1ba4ef14-c563-4903-b359-f80b487d8ced-client-ca" (OuterVolumeSpecName: "client-ca") pod "1ba4ef14-c563-4903-b359-f80b487d8ced" (UID: "1ba4ef14-c563-4903-b359-f80b487d8ced"). InnerVolumeSpecName "client-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:15:22 crc kubenswrapper[5070]: I1213 03:15:22.043588 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1ba4ef14-c563-4903-b359-f80b487d8ced-config\") pod \"1ba4ef14-c563-4903-b359-f80b487d8ced\" (UID: \"1ba4ef14-c563-4903-b359-f80b487d8ced\") " Dec 13 03:15:22 crc kubenswrapper[5070]: I1213 03:15:22.043652 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1ba4ef14-c563-4903-b359-f80b487d8ced-serving-cert\") pod \"1ba4ef14-c563-4903-b359-f80b487d8ced\" (UID: \"1ba4ef14-c563-4903-b359-f80b487d8ced\") " Dec 13 03:15:22 crc kubenswrapper[5070]: I1213 03:15:22.044023 5070 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e26ec275-42a6-42af-a6ed-0872bd777acf-config\") on node \"crc\" DevicePath \"\"" Dec 13 03:15:22 crc kubenswrapper[5070]: I1213 03:15:22.044059 5070 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1ba4ef14-c563-4903-b359-f80b487d8ced-client-ca\") on node \"crc\" DevicePath \"\"" Dec 13 03:15:22 crc kubenswrapper[5070]: I1213 03:15:22.044080 5070 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/e26ec275-42a6-42af-a6ed-0872bd777acf-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 13 03:15:22 crc kubenswrapper[5070]: I1213 03:15:22.044098 5070 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e26ec275-42a6-42af-a6ed-0872bd777acf-client-ca\") on node \"crc\" DevicePath \"\"" Dec 13 03:15:22 crc kubenswrapper[5070]: I1213 03:15:22.044113 5070 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e26ec275-42a6-42af-a6ed-0872bd777acf-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 13 03:15:22 crc kubenswrapper[5070]: I1213 03:15:22.044129 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wx7n6\" (UniqueName: \"kubernetes.io/projected/e26ec275-42a6-42af-a6ed-0872bd777acf-kube-api-access-wx7n6\") on node \"crc\" DevicePath \"\"" Dec 13 03:15:22 crc kubenswrapper[5070]: I1213 03:15:22.044179 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1ba4ef14-c563-4903-b359-f80b487d8ced-config" (OuterVolumeSpecName: "config") pod "1ba4ef14-c563-4903-b359-f80b487d8ced" (UID: "1ba4ef14-c563-4903-b359-f80b487d8ced"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:15:22 crc kubenswrapper[5070]: I1213 03:15:22.046943 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1ba4ef14-c563-4903-b359-f80b487d8ced-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1ba4ef14-c563-4903-b359-f80b487d8ced" (UID: "1ba4ef14-c563-4903-b359-f80b487d8ced"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:15:22 crc kubenswrapper[5070]: I1213 03:15:22.047024 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1ba4ef14-c563-4903-b359-f80b487d8ced-kube-api-access-m5h54" (OuterVolumeSpecName: "kube-api-access-m5h54") pod "1ba4ef14-c563-4903-b359-f80b487d8ced" (UID: "1ba4ef14-c563-4903-b359-f80b487d8ced"). 
InnerVolumeSpecName "kube-api-access-m5h54". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:15:22 crc kubenswrapper[5070]: I1213 03:15:22.145370 5070 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1ba4ef14-c563-4903-b359-f80b487d8ced-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 13 03:15:22 crc kubenswrapper[5070]: I1213 03:15:22.145415 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m5h54\" (UniqueName: \"kubernetes.io/projected/1ba4ef14-c563-4903-b359-f80b487d8ced-kube-api-access-m5h54\") on node \"crc\" DevicePath \"\"" Dec 13 03:15:22 crc kubenswrapper[5070]: I1213 03:15:22.145430 5070 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1ba4ef14-c563-4903-b359-f80b487d8ced-config\") on node \"crc\" DevicePath \"\"" Dec 13 03:15:22 crc kubenswrapper[5070]: I1213 03:15:22.531303 5070 generic.go:334] "Generic (PLEG): container finished" podID="1ba4ef14-c563-4903-b359-f80b487d8ced" containerID="12ee602fd25ff6548928b7d467399f3a82186c392108eaf09365d2d206aff2a0" exitCode=0 Dec 13 03:15:22 crc kubenswrapper[5070]: I1213 03:15:22.531364 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-27s4z" event={"ID":"1ba4ef14-c563-4903-b359-f80b487d8ced","Type":"ContainerDied","Data":"12ee602fd25ff6548928b7d467399f3a82186c392108eaf09365d2d206aff2a0"} Dec 13 03:15:22 crc kubenswrapper[5070]: I1213 03:15:22.531391 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-27s4z" event={"ID":"1ba4ef14-c563-4903-b359-f80b487d8ced","Type":"ContainerDied","Data":"574dbd0d94a157c547bbcb99e0ffeb1cdc0b7f9d3c781a7c08ab19be782831fc"} Dec 13 03:15:22 crc kubenswrapper[5070]: I1213 03:15:22.531408 5070 scope.go:117] "RemoveContainer" containerID="12ee602fd25ff6548928b7d467399f3a82186c392108eaf09365d2d206aff2a0" Dec 13 03:15:22 crc kubenswrapper[5070]: I1213 03:15:22.531551 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-27s4z" Dec 13 03:15:22 crc kubenswrapper[5070]: I1213 03:15:22.535038 5070 generic.go:334] "Generic (PLEG): container finished" podID="e26ec275-42a6-42af-a6ed-0872bd777acf" containerID="5bf4aafb16a3734e016df6f6f0881707b3c56144b647ddf961cdd3f099d61dec" exitCode=0 Dec 13 03:15:22 crc kubenswrapper[5070]: I1213 03:15:22.535084 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-qk5h4" event={"ID":"e26ec275-42a6-42af-a6ed-0872bd777acf","Type":"ContainerDied","Data":"5bf4aafb16a3734e016df6f6f0881707b3c56144b647ddf961cdd3f099d61dec"} Dec 13 03:15:22 crc kubenswrapper[5070]: I1213 03:15:22.535120 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-qk5h4" event={"ID":"e26ec275-42a6-42af-a6ed-0872bd777acf","Type":"ContainerDied","Data":"2bb2e9188d66aa3e89327e71b098c5b8ec6333eff88b88f4625c32dc3c18128a"} Dec 13 03:15:22 crc kubenswrapper[5070]: I1213 03:15:22.535179 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-qk5h4" Dec 13 03:15:22 crc kubenswrapper[5070]: I1213 03:15:22.551534 5070 scope.go:117] "RemoveContainer" containerID="12ee602fd25ff6548928b7d467399f3a82186c392108eaf09365d2d206aff2a0" Dec 13 03:15:22 crc kubenswrapper[5070]: E1213 03:15:22.554878 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"12ee602fd25ff6548928b7d467399f3a82186c392108eaf09365d2d206aff2a0\": container with ID starting with 12ee602fd25ff6548928b7d467399f3a82186c392108eaf09365d2d206aff2a0 not found: ID does not exist" containerID="12ee602fd25ff6548928b7d467399f3a82186c392108eaf09365d2d206aff2a0" Dec 13 03:15:22 crc kubenswrapper[5070]: I1213 03:15:22.554918 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"12ee602fd25ff6548928b7d467399f3a82186c392108eaf09365d2d206aff2a0"} err="failed to get container status \"12ee602fd25ff6548928b7d467399f3a82186c392108eaf09365d2d206aff2a0\": rpc error: code = NotFound desc = could not find container \"12ee602fd25ff6548928b7d467399f3a82186c392108eaf09365d2d206aff2a0\": container with ID starting with 12ee602fd25ff6548928b7d467399f3a82186c392108eaf09365d2d206aff2a0 not found: ID does not exist" Dec 13 03:15:22 crc kubenswrapper[5070]: I1213 03:15:22.554945 5070 scope.go:117] "RemoveContainer" containerID="5bf4aafb16a3734e016df6f6f0881707b3c56144b647ddf961cdd3f099d61dec" Dec 13 03:15:22 crc kubenswrapper[5070]: I1213 03:15:22.555371 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-27s4z"] Dec 13 03:15:22 crc kubenswrapper[5070]: I1213 03:15:22.558832 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-27s4z"] Dec 13 03:15:22 crc kubenswrapper[5070]: I1213 03:15:22.569570 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-qk5h4"] Dec 13 03:15:22 crc kubenswrapper[5070]: I1213 03:15:22.570603 5070 scope.go:117] "RemoveContainer" containerID="5bf4aafb16a3734e016df6f6f0881707b3c56144b647ddf961cdd3f099d61dec" Dec 13 03:15:22 crc kubenswrapper[5070]: E1213 03:15:22.571099 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5bf4aafb16a3734e016df6f6f0881707b3c56144b647ddf961cdd3f099d61dec\": container with ID starting with 5bf4aafb16a3734e016df6f6f0881707b3c56144b647ddf961cdd3f099d61dec not found: ID does not exist" containerID="5bf4aafb16a3734e016df6f6f0881707b3c56144b647ddf961cdd3f099d61dec" Dec 13 03:15:22 crc kubenswrapper[5070]: I1213 03:15:22.571131 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5bf4aafb16a3734e016df6f6f0881707b3c56144b647ddf961cdd3f099d61dec"} err="failed to get container status \"5bf4aafb16a3734e016df6f6f0881707b3c56144b647ddf961cdd3f099d61dec\": rpc error: code = NotFound desc = could not find container \"5bf4aafb16a3734e016df6f6f0881707b3c56144b647ddf961cdd3f099d61dec\": container with ID starting with 5bf4aafb16a3734e016df6f6f0881707b3c56144b647ddf961cdd3f099d61dec not found: ID does not exist" Dec 13 03:15:22 crc kubenswrapper[5070]: I1213 03:15:22.573869 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-qk5h4"] Dec 13 03:15:22 crc 
kubenswrapper[5070]: I1213 03:15:22.628941 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-9bcf6dbd5-9n7p9"] Dec 13 03:15:22 crc kubenswrapper[5070]: E1213 03:15:22.629122 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a88af4a7-bdb6-4fca-b7fb-c6fe7f85bc4a" containerName="extract-utilities" Dec 13 03:15:22 crc kubenswrapper[5070]: I1213 03:15:22.629133 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="a88af4a7-bdb6-4fca-b7fb-c6fe7f85bc4a" containerName="extract-utilities" Dec 13 03:15:22 crc kubenswrapper[5070]: E1213 03:15:22.629147 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="383cb754-9d42-43dd-9cb7-5238fec04ce5" containerName="extract-content" Dec 13 03:15:22 crc kubenswrapper[5070]: I1213 03:15:22.629153 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="383cb754-9d42-43dd-9cb7-5238fec04ce5" containerName="extract-content" Dec 13 03:15:22 crc kubenswrapper[5070]: E1213 03:15:22.629163 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a88af4a7-bdb6-4fca-b7fb-c6fe7f85bc4a" containerName="registry-server" Dec 13 03:15:22 crc kubenswrapper[5070]: I1213 03:15:22.629168 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="a88af4a7-bdb6-4fca-b7fb-c6fe7f85bc4a" containerName="registry-server" Dec 13 03:15:22 crc kubenswrapper[5070]: E1213 03:15:22.629175 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3c6dca7-36c0-4fbc-90b0-a4f91bd588f3" containerName="registry-server" Dec 13 03:15:22 crc kubenswrapper[5070]: I1213 03:15:22.629180 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3c6dca7-36c0-4fbc-90b0-a4f91bd588f3" containerName="registry-server" Dec 13 03:15:22 crc kubenswrapper[5070]: E1213 03:15:22.629187 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2782a41c-ce36-4c0d-89c1-27b5e12e9b00" containerName="extract-content" Dec 13 03:15:22 crc kubenswrapper[5070]: I1213 03:15:22.629193 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="2782a41c-ce36-4c0d-89c1-27b5e12e9b00" containerName="extract-content" Dec 13 03:15:22 crc kubenswrapper[5070]: E1213 03:15:22.629199 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3c6dca7-36c0-4fbc-90b0-a4f91bd588f3" containerName="extract-content" Dec 13 03:15:22 crc kubenswrapper[5070]: I1213 03:15:22.629205 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3c6dca7-36c0-4fbc-90b0-a4f91bd588f3" containerName="extract-content" Dec 13 03:15:22 crc kubenswrapper[5070]: E1213 03:15:22.629215 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e26ec275-42a6-42af-a6ed-0872bd777acf" containerName="controller-manager" Dec 13 03:15:22 crc kubenswrapper[5070]: I1213 03:15:22.629221 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="e26ec275-42a6-42af-a6ed-0872bd777acf" containerName="controller-manager" Dec 13 03:15:22 crc kubenswrapper[5070]: E1213 03:15:22.629228 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2782a41c-ce36-4c0d-89c1-27b5e12e9b00" containerName="registry-server" Dec 13 03:15:22 crc kubenswrapper[5070]: I1213 03:15:22.629233 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="2782a41c-ce36-4c0d-89c1-27b5e12e9b00" containerName="registry-server" Dec 13 03:15:22 crc kubenswrapper[5070]: E1213 03:15:22.629239 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1ba4ef14-c563-4903-b359-f80b487d8ced" 
containerName="route-controller-manager" Dec 13 03:15:22 crc kubenswrapper[5070]: I1213 03:15:22.629244 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="1ba4ef14-c563-4903-b359-f80b487d8ced" containerName="route-controller-manager" Dec 13 03:15:22 crc kubenswrapper[5070]: E1213 03:15:22.629252 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9e4db140-cd43-41ac-a1d7-1913a66ba814" containerName="extract-utilities" Dec 13 03:15:22 crc kubenswrapper[5070]: I1213 03:15:22.629257 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="9e4db140-cd43-41ac-a1d7-1913a66ba814" containerName="extract-utilities" Dec 13 03:15:22 crc kubenswrapper[5070]: E1213 03:15:22.629307 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="383cb754-9d42-43dd-9cb7-5238fec04ce5" containerName="registry-server" Dec 13 03:15:22 crc kubenswrapper[5070]: I1213 03:15:22.629316 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="383cb754-9d42-43dd-9cb7-5238fec04ce5" containerName="registry-server" Dec 13 03:15:22 crc kubenswrapper[5070]: E1213 03:15:22.629325 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9e4db140-cd43-41ac-a1d7-1913a66ba814" containerName="extract-content" Dec 13 03:15:22 crc kubenswrapper[5070]: I1213 03:15:22.629331 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="9e4db140-cd43-41ac-a1d7-1913a66ba814" containerName="extract-content" Dec 13 03:15:22 crc kubenswrapper[5070]: E1213 03:15:22.629342 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9e4db140-cd43-41ac-a1d7-1913a66ba814" containerName="registry-server" Dec 13 03:15:22 crc kubenswrapper[5070]: I1213 03:15:22.629350 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="9e4db140-cd43-41ac-a1d7-1913a66ba814" containerName="registry-server" Dec 13 03:15:22 crc kubenswrapper[5070]: E1213 03:15:22.629363 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="383cb754-9d42-43dd-9cb7-5238fec04ce5" containerName="extract-utilities" Dec 13 03:15:22 crc kubenswrapper[5070]: I1213 03:15:22.629371 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="383cb754-9d42-43dd-9cb7-5238fec04ce5" containerName="extract-utilities" Dec 13 03:15:22 crc kubenswrapper[5070]: E1213 03:15:22.629380 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a88af4a7-bdb6-4fca-b7fb-c6fe7f85bc4a" containerName="extract-content" Dec 13 03:15:22 crc kubenswrapper[5070]: I1213 03:15:22.629386 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="a88af4a7-bdb6-4fca-b7fb-c6fe7f85bc4a" containerName="extract-content" Dec 13 03:15:22 crc kubenswrapper[5070]: E1213 03:15:22.629397 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2782a41c-ce36-4c0d-89c1-27b5e12e9b00" containerName="extract-utilities" Dec 13 03:15:22 crc kubenswrapper[5070]: I1213 03:15:22.629404 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="2782a41c-ce36-4c0d-89c1-27b5e12e9b00" containerName="extract-utilities" Dec 13 03:15:22 crc kubenswrapper[5070]: E1213 03:15:22.629415 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3c6dca7-36c0-4fbc-90b0-a4f91bd588f3" containerName="extract-utilities" Dec 13 03:15:22 crc kubenswrapper[5070]: I1213 03:15:22.629422 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3c6dca7-36c0-4fbc-90b0-a4f91bd588f3" containerName="extract-utilities" Dec 13 03:15:22 crc kubenswrapper[5070]: E1213 03:15:22.629432 5070 cpu_manager.go:410] "RemoveStaleState: removing 
container" podUID="d84fad97-769a-4f5d-8e19-d91d308675f6" containerName="marketplace-operator" Dec 13 03:15:22 crc kubenswrapper[5070]: I1213 03:15:22.629439 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="d84fad97-769a-4f5d-8e19-d91d308675f6" containerName="marketplace-operator" Dec 13 03:15:22 crc kubenswrapper[5070]: I1213 03:15:22.629547 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="9e4db140-cd43-41ac-a1d7-1913a66ba814" containerName="registry-server" Dec 13 03:15:22 crc kubenswrapper[5070]: I1213 03:15:22.629559 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="383cb754-9d42-43dd-9cb7-5238fec04ce5" containerName="registry-server" Dec 13 03:15:22 crc kubenswrapper[5070]: I1213 03:15:22.629567 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="a88af4a7-bdb6-4fca-b7fb-c6fe7f85bc4a" containerName="registry-server" Dec 13 03:15:22 crc kubenswrapper[5070]: I1213 03:15:22.629573 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="d84fad97-769a-4f5d-8e19-d91d308675f6" containerName="marketplace-operator" Dec 13 03:15:22 crc kubenswrapper[5070]: I1213 03:15:22.629582 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="b3c6dca7-36c0-4fbc-90b0-a4f91bd588f3" containerName="registry-server" Dec 13 03:15:22 crc kubenswrapper[5070]: I1213 03:15:22.629591 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="1ba4ef14-c563-4903-b359-f80b487d8ced" containerName="route-controller-manager" Dec 13 03:15:22 crc kubenswrapper[5070]: I1213 03:15:22.629601 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="2782a41c-ce36-4c0d-89c1-27b5e12e9b00" containerName="registry-server" Dec 13 03:15:22 crc kubenswrapper[5070]: I1213 03:15:22.629609 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="e26ec275-42a6-42af-a6ed-0872bd777acf" containerName="controller-manager" Dec 13 03:15:22 crc kubenswrapper[5070]: I1213 03:15:22.629942 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-9bcf6dbd5-9n7p9" Dec 13 03:15:22 crc kubenswrapper[5070]: I1213 03:15:22.632387 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 13 03:15:22 crc kubenswrapper[5070]: I1213 03:15:22.632607 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 13 03:15:22 crc kubenswrapper[5070]: I1213 03:15:22.632779 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 13 03:15:22 crc kubenswrapper[5070]: I1213 03:15:22.632994 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 13 03:15:22 crc kubenswrapper[5070]: I1213 03:15:22.633233 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 13 03:15:22 crc kubenswrapper[5070]: I1213 03:15:22.633392 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 13 03:15:22 crc kubenswrapper[5070]: I1213 03:15:22.640847 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-9bcf6dbd5-9n7p9"] Dec 13 03:15:22 crc kubenswrapper[5070]: I1213 03:15:22.751583 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/24907aa1-75fd-490a-b416-bcff3fcf73c6-config\") pod \"route-controller-manager-9bcf6dbd5-9n7p9\" (UID: \"24907aa1-75fd-490a-b416-bcff3fcf73c6\") " pod="openshift-route-controller-manager/route-controller-manager-9bcf6dbd5-9n7p9" Dec 13 03:15:22 crc kubenswrapper[5070]: I1213 03:15:22.751645 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/24907aa1-75fd-490a-b416-bcff3fcf73c6-client-ca\") pod \"route-controller-manager-9bcf6dbd5-9n7p9\" (UID: \"24907aa1-75fd-490a-b416-bcff3fcf73c6\") " pod="openshift-route-controller-manager/route-controller-manager-9bcf6dbd5-9n7p9" Dec 13 03:15:22 crc kubenswrapper[5070]: I1213 03:15:22.751740 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7nqgq\" (UniqueName: \"kubernetes.io/projected/24907aa1-75fd-490a-b416-bcff3fcf73c6-kube-api-access-7nqgq\") pod \"route-controller-manager-9bcf6dbd5-9n7p9\" (UID: \"24907aa1-75fd-490a-b416-bcff3fcf73c6\") " pod="openshift-route-controller-manager/route-controller-manager-9bcf6dbd5-9n7p9" Dec 13 03:15:22 crc kubenswrapper[5070]: I1213 03:15:22.751805 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/24907aa1-75fd-490a-b416-bcff3fcf73c6-serving-cert\") pod \"route-controller-manager-9bcf6dbd5-9n7p9\" (UID: \"24907aa1-75fd-490a-b416-bcff3fcf73c6\") " pod="openshift-route-controller-manager/route-controller-manager-9bcf6dbd5-9n7p9" Dec 13 03:15:22 crc kubenswrapper[5070]: I1213 03:15:22.852653 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/24907aa1-75fd-490a-b416-bcff3fcf73c6-serving-cert\") pod 
\"route-controller-manager-9bcf6dbd5-9n7p9\" (UID: \"24907aa1-75fd-490a-b416-bcff3fcf73c6\") " pod="openshift-route-controller-manager/route-controller-manager-9bcf6dbd5-9n7p9" Dec 13 03:15:22 crc kubenswrapper[5070]: I1213 03:15:22.853047 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/24907aa1-75fd-490a-b416-bcff3fcf73c6-config\") pod \"route-controller-manager-9bcf6dbd5-9n7p9\" (UID: \"24907aa1-75fd-490a-b416-bcff3fcf73c6\") " pod="openshift-route-controller-manager/route-controller-manager-9bcf6dbd5-9n7p9" Dec 13 03:15:22 crc kubenswrapper[5070]: I1213 03:15:22.853179 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/24907aa1-75fd-490a-b416-bcff3fcf73c6-client-ca\") pod \"route-controller-manager-9bcf6dbd5-9n7p9\" (UID: \"24907aa1-75fd-490a-b416-bcff3fcf73c6\") " pod="openshift-route-controller-manager/route-controller-manager-9bcf6dbd5-9n7p9" Dec 13 03:15:22 crc kubenswrapper[5070]: I1213 03:15:22.853279 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7nqgq\" (UniqueName: \"kubernetes.io/projected/24907aa1-75fd-490a-b416-bcff3fcf73c6-kube-api-access-7nqgq\") pod \"route-controller-manager-9bcf6dbd5-9n7p9\" (UID: \"24907aa1-75fd-490a-b416-bcff3fcf73c6\") " pod="openshift-route-controller-manager/route-controller-manager-9bcf6dbd5-9n7p9" Dec 13 03:15:22 crc kubenswrapper[5070]: I1213 03:15:22.854107 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/24907aa1-75fd-490a-b416-bcff3fcf73c6-client-ca\") pod \"route-controller-manager-9bcf6dbd5-9n7p9\" (UID: \"24907aa1-75fd-490a-b416-bcff3fcf73c6\") " pod="openshift-route-controller-manager/route-controller-manager-9bcf6dbd5-9n7p9" Dec 13 03:15:22 crc kubenswrapper[5070]: I1213 03:15:22.854180 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/24907aa1-75fd-490a-b416-bcff3fcf73c6-config\") pod \"route-controller-manager-9bcf6dbd5-9n7p9\" (UID: \"24907aa1-75fd-490a-b416-bcff3fcf73c6\") " pod="openshift-route-controller-manager/route-controller-manager-9bcf6dbd5-9n7p9" Dec 13 03:15:22 crc kubenswrapper[5070]: I1213 03:15:22.856823 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/24907aa1-75fd-490a-b416-bcff3fcf73c6-serving-cert\") pod \"route-controller-manager-9bcf6dbd5-9n7p9\" (UID: \"24907aa1-75fd-490a-b416-bcff3fcf73c6\") " pod="openshift-route-controller-manager/route-controller-manager-9bcf6dbd5-9n7p9" Dec 13 03:15:22 crc kubenswrapper[5070]: I1213 03:15:22.869862 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7nqgq\" (UniqueName: \"kubernetes.io/projected/24907aa1-75fd-490a-b416-bcff3fcf73c6-kube-api-access-7nqgq\") pod \"route-controller-manager-9bcf6dbd5-9n7p9\" (UID: \"24907aa1-75fd-490a-b416-bcff3fcf73c6\") " pod="openshift-route-controller-manager/route-controller-manager-9bcf6dbd5-9n7p9" Dec 13 03:15:22 crc kubenswrapper[5070]: I1213 03:15:22.948927 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-9bcf6dbd5-9n7p9" Dec 13 03:15:23 crc kubenswrapper[5070]: I1213 03:15:23.360645 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-9bcf6dbd5-9n7p9"] Dec 13 03:15:23 crc kubenswrapper[5070]: I1213 03:15:23.544618 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-9bcf6dbd5-9n7p9" event={"ID":"24907aa1-75fd-490a-b416-bcff3fcf73c6","Type":"ContainerStarted","Data":"a28b6bfc23368fe58cf8a19aa5538a937b2ece3b3a8709d71bc5984a250337d6"} Dec 13 03:15:23 crc kubenswrapper[5070]: I1213 03:15:23.545254 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-9bcf6dbd5-9n7p9" event={"ID":"24907aa1-75fd-490a-b416-bcff3fcf73c6","Type":"ContainerStarted","Data":"2fe90810404893ce69e13f8a6118dd53d9c0597b0fe7aeeba54fb7908470a1e4"} Dec 13 03:15:23 crc kubenswrapper[5070]: I1213 03:15:23.546233 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-9bcf6dbd5-9n7p9" Dec 13 03:15:23 crc kubenswrapper[5070]: I1213 03:15:23.547769 5070 patch_prober.go:28] interesting pod/route-controller-manager-9bcf6dbd5-9n7p9 container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.60:8443/healthz\": dial tcp 10.217.0.60:8443: connect: connection refused" start-of-body= Dec 13 03:15:23 crc kubenswrapper[5070]: I1213 03:15:23.547835 5070 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-9bcf6dbd5-9n7p9" podUID="24907aa1-75fd-490a-b416-bcff3fcf73c6" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.60:8443/healthz\": dial tcp 10.217.0.60:8443: connect: connection refused" Dec 13 03:15:23 crc kubenswrapper[5070]: I1213 03:15:23.561715 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-9bcf6dbd5-9n7p9" podStartSLOduration=2.561693537 podStartE2EDuration="2.561693537s" podCreationTimestamp="2025-12-13 03:15:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 03:15:23.56038166 +0000 UTC m=+215.796225216" watchObservedRunningTime="2025-12-13 03:15:23.561693537 +0000 UTC m=+215.797537083" Dec 13 03:15:23 crc kubenswrapper[5070]: I1213 03:15:23.631955 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-c476db947-lp7jr"] Dec 13 03:15:23 crc kubenswrapper[5070]: I1213 03:15:23.632897 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-c476db947-lp7jr" Dec 13 03:15:23 crc kubenswrapper[5070]: I1213 03:15:23.636321 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 13 03:15:23 crc kubenswrapper[5070]: I1213 03:15:23.636535 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 13 03:15:23 crc kubenswrapper[5070]: I1213 03:15:23.636702 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 13 03:15:23 crc kubenswrapper[5070]: I1213 03:15:23.637080 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 13 03:15:23 crc kubenswrapper[5070]: I1213 03:15:23.637520 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 13 03:15:23 crc kubenswrapper[5070]: I1213 03:15:23.638333 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 13 03:15:23 crc kubenswrapper[5070]: I1213 03:15:23.647019 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 13 03:15:23 crc kubenswrapper[5070]: I1213 03:15:23.650567 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-c476db947-lp7jr"] Dec 13 03:15:23 crc kubenswrapper[5070]: I1213 03:15:23.772392 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/340a012b-be3a-4c31-83c3-f5d127cb5f19-config\") pod \"controller-manager-c476db947-lp7jr\" (UID: \"340a012b-be3a-4c31-83c3-f5d127cb5f19\") " pod="openshift-controller-manager/controller-manager-c476db947-lp7jr" Dec 13 03:15:23 crc kubenswrapper[5070]: I1213 03:15:23.772471 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/340a012b-be3a-4c31-83c3-f5d127cb5f19-proxy-ca-bundles\") pod \"controller-manager-c476db947-lp7jr\" (UID: \"340a012b-be3a-4c31-83c3-f5d127cb5f19\") " pod="openshift-controller-manager/controller-manager-c476db947-lp7jr" Dec 13 03:15:23 crc kubenswrapper[5070]: I1213 03:15:23.772513 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/340a012b-be3a-4c31-83c3-f5d127cb5f19-serving-cert\") pod \"controller-manager-c476db947-lp7jr\" (UID: \"340a012b-be3a-4c31-83c3-f5d127cb5f19\") " pod="openshift-controller-manager/controller-manager-c476db947-lp7jr" Dec 13 03:15:23 crc kubenswrapper[5070]: I1213 03:15:23.772531 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/340a012b-be3a-4c31-83c3-f5d127cb5f19-client-ca\") pod \"controller-manager-c476db947-lp7jr\" (UID: \"340a012b-be3a-4c31-83c3-f5d127cb5f19\") " pod="openshift-controller-manager/controller-manager-c476db947-lp7jr" Dec 13 03:15:23 crc kubenswrapper[5070]: I1213 03:15:23.772552 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7q7g9\" (UniqueName: 
\"kubernetes.io/projected/340a012b-be3a-4c31-83c3-f5d127cb5f19-kube-api-access-7q7g9\") pod \"controller-manager-c476db947-lp7jr\" (UID: \"340a012b-be3a-4c31-83c3-f5d127cb5f19\") " pod="openshift-controller-manager/controller-manager-c476db947-lp7jr" Dec 13 03:15:23 crc kubenswrapper[5070]: I1213 03:15:23.873709 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/340a012b-be3a-4c31-83c3-f5d127cb5f19-config\") pod \"controller-manager-c476db947-lp7jr\" (UID: \"340a012b-be3a-4c31-83c3-f5d127cb5f19\") " pod="openshift-controller-manager/controller-manager-c476db947-lp7jr" Dec 13 03:15:23 crc kubenswrapper[5070]: I1213 03:15:23.873771 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/340a012b-be3a-4c31-83c3-f5d127cb5f19-proxy-ca-bundles\") pod \"controller-manager-c476db947-lp7jr\" (UID: \"340a012b-be3a-4c31-83c3-f5d127cb5f19\") " pod="openshift-controller-manager/controller-manager-c476db947-lp7jr" Dec 13 03:15:23 crc kubenswrapper[5070]: I1213 03:15:23.873828 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/340a012b-be3a-4c31-83c3-f5d127cb5f19-serving-cert\") pod \"controller-manager-c476db947-lp7jr\" (UID: \"340a012b-be3a-4c31-83c3-f5d127cb5f19\") " pod="openshift-controller-manager/controller-manager-c476db947-lp7jr" Dec 13 03:15:23 crc kubenswrapper[5070]: I1213 03:15:23.873853 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/340a012b-be3a-4c31-83c3-f5d127cb5f19-client-ca\") pod \"controller-manager-c476db947-lp7jr\" (UID: \"340a012b-be3a-4c31-83c3-f5d127cb5f19\") " pod="openshift-controller-manager/controller-manager-c476db947-lp7jr" Dec 13 03:15:23 crc kubenswrapper[5070]: I1213 03:15:23.873881 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7q7g9\" (UniqueName: \"kubernetes.io/projected/340a012b-be3a-4c31-83c3-f5d127cb5f19-kube-api-access-7q7g9\") pod \"controller-manager-c476db947-lp7jr\" (UID: \"340a012b-be3a-4c31-83c3-f5d127cb5f19\") " pod="openshift-controller-manager/controller-manager-c476db947-lp7jr" Dec 13 03:15:23 crc kubenswrapper[5070]: I1213 03:15:23.875017 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/340a012b-be3a-4c31-83c3-f5d127cb5f19-client-ca\") pod \"controller-manager-c476db947-lp7jr\" (UID: \"340a012b-be3a-4c31-83c3-f5d127cb5f19\") " pod="openshift-controller-manager/controller-manager-c476db947-lp7jr" Dec 13 03:15:23 crc kubenswrapper[5070]: I1213 03:15:23.875190 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/340a012b-be3a-4c31-83c3-f5d127cb5f19-proxy-ca-bundles\") pod \"controller-manager-c476db947-lp7jr\" (UID: \"340a012b-be3a-4c31-83c3-f5d127cb5f19\") " pod="openshift-controller-manager/controller-manager-c476db947-lp7jr" Dec 13 03:15:23 crc kubenswrapper[5070]: I1213 03:15:23.875204 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/340a012b-be3a-4c31-83c3-f5d127cb5f19-config\") pod \"controller-manager-c476db947-lp7jr\" (UID: \"340a012b-be3a-4c31-83c3-f5d127cb5f19\") " pod="openshift-controller-manager/controller-manager-c476db947-lp7jr" Dec 
13 03:15:23 crc kubenswrapper[5070]: I1213 03:15:23.883673 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/340a012b-be3a-4c31-83c3-f5d127cb5f19-serving-cert\") pod \"controller-manager-c476db947-lp7jr\" (UID: \"340a012b-be3a-4c31-83c3-f5d127cb5f19\") " pod="openshift-controller-manager/controller-manager-c476db947-lp7jr" Dec 13 03:15:23 crc kubenswrapper[5070]: I1213 03:15:23.899043 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7q7g9\" (UniqueName: \"kubernetes.io/projected/340a012b-be3a-4c31-83c3-f5d127cb5f19-kube-api-access-7q7g9\") pod \"controller-manager-c476db947-lp7jr\" (UID: \"340a012b-be3a-4c31-83c3-f5d127cb5f19\") " pod="openshift-controller-manager/controller-manager-c476db947-lp7jr" Dec 13 03:15:23 crc kubenswrapper[5070]: I1213 03:15:23.964298 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-c476db947-lp7jr" Dec 13 03:15:24 crc kubenswrapper[5070]: I1213 03:15:24.174461 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1ba4ef14-c563-4903-b359-f80b487d8ced" path="/var/lib/kubelet/pods/1ba4ef14-c563-4903-b359-f80b487d8ced/volumes" Dec 13 03:15:24 crc kubenswrapper[5070]: I1213 03:15:24.175569 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e26ec275-42a6-42af-a6ed-0872bd777acf" path="/var/lib/kubelet/pods/e26ec275-42a6-42af-a6ed-0872bd777acf/volumes" Dec 13 03:15:24 crc kubenswrapper[5070]: I1213 03:15:24.193182 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-c476db947-lp7jr"] Dec 13 03:15:24 crc kubenswrapper[5070]: I1213 03:15:24.549794 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-c476db947-lp7jr" event={"ID":"340a012b-be3a-4c31-83c3-f5d127cb5f19","Type":"ContainerStarted","Data":"739d6f8a771dc9373ebc4499443ccbc471486bf23b0e616c847ae4399444845c"} Dec 13 03:15:24 crc kubenswrapper[5070]: I1213 03:15:24.550102 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-c476db947-lp7jr" event={"ID":"340a012b-be3a-4c31-83c3-f5d127cb5f19","Type":"ContainerStarted","Data":"2e99f5a6a76e12ee52dc88dab846c6656e8b1ec79381a4fbbc3f1d386bd57281"} Dec 13 03:15:24 crc kubenswrapper[5070]: I1213 03:15:24.556052 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-9bcf6dbd5-9n7p9" Dec 13 03:15:24 crc kubenswrapper[5070]: I1213 03:15:24.583212 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-c476db947-lp7jr" podStartSLOduration=3.583194168 podStartE2EDuration="3.583194168s" podCreationTimestamp="2025-12-13 03:15:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 03:15:24.581680564 +0000 UTC m=+216.817524120" watchObservedRunningTime="2025-12-13 03:15:24.583194168 +0000 UTC m=+216.819037714" Dec 13 03:15:25 crc kubenswrapper[5070]: I1213 03:15:25.554237 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-c476db947-lp7jr" Dec 13 03:15:25 crc kubenswrapper[5070]: I1213 03:15:25.559012 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="ready" pod="openshift-controller-manager/controller-manager-c476db947-lp7jr" Dec 13 03:15:28 crc kubenswrapper[5070]: I1213 03:15:28.689335 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-2dbtp"] Dec 13 03:15:28 crc kubenswrapper[5070]: I1213 03:15:28.690813 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-2dbtp" Dec 13 03:15:28 crc kubenswrapper[5070]: I1213 03:15:28.694390 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 13 03:15:28 crc kubenswrapper[5070]: I1213 03:15:28.700735 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-2dbtp"] Dec 13 03:15:28 crc kubenswrapper[5070]: I1213 03:15:28.833755 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hhbzt\" (UniqueName: \"kubernetes.io/projected/6cee5946-a6c3-4d8e-a6f3-7200c45c3a98-kube-api-access-hhbzt\") pod \"certified-operators-2dbtp\" (UID: \"6cee5946-a6c3-4d8e-a6f3-7200c45c3a98\") " pod="openshift-marketplace/certified-operators-2dbtp" Dec 13 03:15:28 crc kubenswrapper[5070]: I1213 03:15:28.833836 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6cee5946-a6c3-4d8e-a6f3-7200c45c3a98-catalog-content\") pod \"certified-operators-2dbtp\" (UID: \"6cee5946-a6c3-4d8e-a6f3-7200c45c3a98\") " pod="openshift-marketplace/certified-operators-2dbtp" Dec 13 03:15:28 crc kubenswrapper[5070]: I1213 03:15:28.833866 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6cee5946-a6c3-4d8e-a6f3-7200c45c3a98-utilities\") pod \"certified-operators-2dbtp\" (UID: \"6cee5946-a6c3-4d8e-a6f3-7200c45c3a98\") " pod="openshift-marketplace/certified-operators-2dbtp" Dec 13 03:15:28 crc kubenswrapper[5070]: I1213 03:15:28.885042 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-db5z5"] Dec 13 03:15:28 crc kubenswrapper[5070]: I1213 03:15:28.885976 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-db5z5" Dec 13 03:15:28 crc kubenswrapper[5070]: I1213 03:15:28.887767 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 13 03:15:28 crc kubenswrapper[5070]: I1213 03:15:28.894722 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-db5z5"] Dec 13 03:15:28 crc kubenswrapper[5070]: I1213 03:15:28.936240 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6cee5946-a6c3-4d8e-a6f3-7200c45c3a98-catalog-content\") pod \"certified-operators-2dbtp\" (UID: \"6cee5946-a6c3-4d8e-a6f3-7200c45c3a98\") " pod="openshift-marketplace/certified-operators-2dbtp" Dec 13 03:15:28 crc kubenswrapper[5070]: I1213 03:15:28.937280 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6cee5946-a6c3-4d8e-a6f3-7200c45c3a98-utilities\") pod \"certified-operators-2dbtp\" (UID: \"6cee5946-a6c3-4d8e-a6f3-7200c45c3a98\") " pod="openshift-marketplace/certified-operators-2dbtp" Dec 13 03:15:28 crc kubenswrapper[5070]: I1213 03:15:28.936995 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6cee5946-a6c3-4d8e-a6f3-7200c45c3a98-catalog-content\") pod \"certified-operators-2dbtp\" (UID: \"6cee5946-a6c3-4d8e-a6f3-7200c45c3a98\") " pod="openshift-marketplace/certified-operators-2dbtp" Dec 13 03:15:28 crc kubenswrapper[5070]: I1213 03:15:28.937421 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hhbzt\" (UniqueName: \"kubernetes.io/projected/6cee5946-a6c3-4d8e-a6f3-7200c45c3a98-kube-api-access-hhbzt\") pod \"certified-operators-2dbtp\" (UID: \"6cee5946-a6c3-4d8e-a6f3-7200c45c3a98\") " pod="openshift-marketplace/certified-operators-2dbtp" Dec 13 03:15:28 crc kubenswrapper[5070]: I1213 03:15:28.937871 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6cee5946-a6c3-4d8e-a6f3-7200c45c3a98-utilities\") pod \"certified-operators-2dbtp\" (UID: \"6cee5946-a6c3-4d8e-a6f3-7200c45c3a98\") " pod="openshift-marketplace/certified-operators-2dbtp" Dec 13 03:15:28 crc kubenswrapper[5070]: I1213 03:15:28.956583 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hhbzt\" (UniqueName: \"kubernetes.io/projected/6cee5946-a6c3-4d8e-a6f3-7200c45c3a98-kube-api-access-hhbzt\") pod \"certified-operators-2dbtp\" (UID: \"6cee5946-a6c3-4d8e-a6f3-7200c45c3a98\") " pod="openshift-marketplace/certified-operators-2dbtp" Dec 13 03:15:29 crc kubenswrapper[5070]: I1213 03:15:29.013555 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-2dbtp" Dec 13 03:15:29 crc kubenswrapper[5070]: I1213 03:15:29.038786 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/99ef6e31-9820-4f3c-9d75-10e8286eb467-catalog-content\") pod \"community-operators-db5z5\" (UID: \"99ef6e31-9820-4f3c-9d75-10e8286eb467\") " pod="openshift-marketplace/community-operators-db5z5" Dec 13 03:15:29 crc kubenswrapper[5070]: I1213 03:15:29.039183 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/99ef6e31-9820-4f3c-9d75-10e8286eb467-utilities\") pod \"community-operators-db5z5\" (UID: \"99ef6e31-9820-4f3c-9d75-10e8286eb467\") " pod="openshift-marketplace/community-operators-db5z5" Dec 13 03:15:29 crc kubenswrapper[5070]: I1213 03:15:29.039217 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rtcch\" (UniqueName: \"kubernetes.io/projected/99ef6e31-9820-4f3c-9d75-10e8286eb467-kube-api-access-rtcch\") pod \"community-operators-db5z5\" (UID: \"99ef6e31-9820-4f3c-9d75-10e8286eb467\") " pod="openshift-marketplace/community-operators-db5z5" Dec 13 03:15:29 crc kubenswrapper[5070]: I1213 03:15:29.140244 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rtcch\" (UniqueName: \"kubernetes.io/projected/99ef6e31-9820-4f3c-9d75-10e8286eb467-kube-api-access-rtcch\") pod \"community-operators-db5z5\" (UID: \"99ef6e31-9820-4f3c-9d75-10e8286eb467\") " pod="openshift-marketplace/community-operators-db5z5" Dec 13 03:15:29 crc kubenswrapper[5070]: I1213 03:15:29.140375 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/99ef6e31-9820-4f3c-9d75-10e8286eb467-catalog-content\") pod \"community-operators-db5z5\" (UID: \"99ef6e31-9820-4f3c-9d75-10e8286eb467\") " pod="openshift-marketplace/community-operators-db5z5" Dec 13 03:15:29 crc kubenswrapper[5070]: I1213 03:15:29.140402 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/99ef6e31-9820-4f3c-9d75-10e8286eb467-utilities\") pod \"community-operators-db5z5\" (UID: \"99ef6e31-9820-4f3c-9d75-10e8286eb467\") " pod="openshift-marketplace/community-operators-db5z5" Dec 13 03:15:29 crc kubenswrapper[5070]: I1213 03:15:29.140907 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/99ef6e31-9820-4f3c-9d75-10e8286eb467-utilities\") pod \"community-operators-db5z5\" (UID: \"99ef6e31-9820-4f3c-9d75-10e8286eb467\") " pod="openshift-marketplace/community-operators-db5z5" Dec 13 03:15:29 crc kubenswrapper[5070]: I1213 03:15:29.141198 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/99ef6e31-9820-4f3c-9d75-10e8286eb467-catalog-content\") pod \"community-operators-db5z5\" (UID: \"99ef6e31-9820-4f3c-9d75-10e8286eb467\") " pod="openshift-marketplace/community-operators-db5z5" Dec 13 03:15:29 crc kubenswrapper[5070]: I1213 03:15:29.160542 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rtcch\" (UniqueName: \"kubernetes.io/projected/99ef6e31-9820-4f3c-9d75-10e8286eb467-kube-api-access-rtcch\") pod 
\"community-operators-db5z5\" (UID: \"99ef6e31-9820-4f3c-9d75-10e8286eb467\") " pod="openshift-marketplace/community-operators-db5z5" Dec 13 03:15:29 crc kubenswrapper[5070]: I1213 03:15:29.201323 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-db5z5" Dec 13 03:15:29 crc kubenswrapper[5070]: I1213 03:15:29.454223 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-2dbtp"] Dec 13 03:15:29 crc kubenswrapper[5070]: I1213 03:15:29.587609 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2dbtp" event={"ID":"6cee5946-a6c3-4d8e-a6f3-7200c45c3a98","Type":"ContainerStarted","Data":"77f161b3c0487fce3ce57946a05405190fa6bcccd0d483d72cde0f5de24d8398"} Dec 13 03:15:29 crc kubenswrapper[5070]: I1213 03:15:29.593148 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-db5z5"] Dec 13 03:15:29 crc kubenswrapper[5070]: W1213 03:15:29.600164 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod99ef6e31_9820_4f3c_9d75_10e8286eb467.slice/crio-17971138594bf8dd5865ff5ea9429e63995b77d03143b005b44203bc72e52095 WatchSource:0}: Error finding container 17971138594bf8dd5865ff5ea9429e63995b77d03143b005b44203bc72e52095: Status 404 returned error can't find the container with id 17971138594bf8dd5865ff5ea9429e63995b77d03143b005b44203bc72e52095 Dec 13 03:15:30 crc kubenswrapper[5070]: I1213 03:15:30.594190 5070 generic.go:334] "Generic (PLEG): container finished" podID="99ef6e31-9820-4f3c-9d75-10e8286eb467" containerID="7e45f1dc22ea9019a6e59acbecd1d229d94d3c6a51c25ece4d656b8a402dfb82" exitCode=0 Dec 13 03:15:30 crc kubenswrapper[5070]: I1213 03:15:30.594241 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-db5z5" event={"ID":"99ef6e31-9820-4f3c-9d75-10e8286eb467","Type":"ContainerDied","Data":"7e45f1dc22ea9019a6e59acbecd1d229d94d3c6a51c25ece4d656b8a402dfb82"} Dec 13 03:15:30 crc kubenswrapper[5070]: I1213 03:15:30.594611 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-db5z5" event={"ID":"99ef6e31-9820-4f3c-9d75-10e8286eb467","Type":"ContainerStarted","Data":"17971138594bf8dd5865ff5ea9429e63995b77d03143b005b44203bc72e52095"} Dec 13 03:15:30 crc kubenswrapper[5070]: I1213 03:15:30.596268 5070 generic.go:334] "Generic (PLEG): container finished" podID="6cee5946-a6c3-4d8e-a6f3-7200c45c3a98" containerID="2a78ca5b0129692363fcdfc24e244fd79903045ed8d7706c5922bcce24126f32" exitCode=0 Dec 13 03:15:30 crc kubenswrapper[5070]: I1213 03:15:30.596299 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2dbtp" event={"ID":"6cee5946-a6c3-4d8e-a6f3-7200c45c3a98","Type":"ContainerDied","Data":"2a78ca5b0129692363fcdfc24e244fd79903045ed8d7706c5922bcce24126f32"} Dec 13 03:15:31 crc kubenswrapper[5070]: I1213 03:15:31.109204 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-xmckj"] Dec 13 03:15:31 crc kubenswrapper[5070]: I1213 03:15:31.110542 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xmckj" Dec 13 03:15:31 crc kubenswrapper[5070]: I1213 03:15:31.113929 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 13 03:15:31 crc kubenswrapper[5070]: I1213 03:15:31.116340 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-xmckj"] Dec 13 03:15:31 crc kubenswrapper[5070]: I1213 03:15:31.266986 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5vf4p\" (UniqueName: \"kubernetes.io/projected/e3984f87-e746-4d49-95d2-acca4c05400a-kube-api-access-5vf4p\") pod \"redhat-marketplace-xmckj\" (UID: \"e3984f87-e746-4d49-95d2-acca4c05400a\") " pod="openshift-marketplace/redhat-marketplace-xmckj" Dec 13 03:15:31 crc kubenswrapper[5070]: I1213 03:15:31.267064 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e3984f87-e746-4d49-95d2-acca4c05400a-catalog-content\") pod \"redhat-marketplace-xmckj\" (UID: \"e3984f87-e746-4d49-95d2-acca4c05400a\") " pod="openshift-marketplace/redhat-marketplace-xmckj" Dec 13 03:15:31 crc kubenswrapper[5070]: I1213 03:15:31.267108 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e3984f87-e746-4d49-95d2-acca4c05400a-utilities\") pod \"redhat-marketplace-xmckj\" (UID: \"e3984f87-e746-4d49-95d2-acca4c05400a\") " pod="openshift-marketplace/redhat-marketplace-xmckj" Dec 13 03:15:31 crc kubenswrapper[5070]: I1213 03:15:31.284941 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-6lzk5"] Dec 13 03:15:31 crc kubenswrapper[5070]: I1213 03:15:31.285925 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-6lzk5" Dec 13 03:15:31 crc kubenswrapper[5070]: I1213 03:15:31.287757 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 13 03:15:31 crc kubenswrapper[5070]: I1213 03:15:31.300040 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-6lzk5"] Dec 13 03:15:31 crc kubenswrapper[5070]: I1213 03:15:31.369107 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/91cbae40-2fa3-4c39-acd8-43cc2c23d902-utilities\") pod \"redhat-operators-6lzk5\" (UID: \"91cbae40-2fa3-4c39-acd8-43cc2c23d902\") " pod="openshift-marketplace/redhat-operators-6lzk5" Dec 13 03:15:31 crc kubenswrapper[5070]: I1213 03:15:31.369180 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/91cbae40-2fa3-4c39-acd8-43cc2c23d902-catalog-content\") pod \"redhat-operators-6lzk5\" (UID: \"91cbae40-2fa3-4c39-acd8-43cc2c23d902\") " pod="openshift-marketplace/redhat-operators-6lzk5" Dec 13 03:15:31 crc kubenswrapper[5070]: I1213 03:15:31.369228 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5vf4p\" (UniqueName: \"kubernetes.io/projected/e3984f87-e746-4d49-95d2-acca4c05400a-kube-api-access-5vf4p\") pod \"redhat-marketplace-xmckj\" (UID: \"e3984f87-e746-4d49-95d2-acca4c05400a\") " pod="openshift-marketplace/redhat-marketplace-xmckj" Dec 13 03:15:31 crc kubenswrapper[5070]: I1213 03:15:31.369266 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e3984f87-e746-4d49-95d2-acca4c05400a-catalog-content\") pod \"redhat-marketplace-xmckj\" (UID: \"e3984f87-e746-4d49-95d2-acca4c05400a\") " pod="openshift-marketplace/redhat-marketplace-xmckj" Dec 13 03:15:31 crc kubenswrapper[5070]: I1213 03:15:31.369360 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e3984f87-e746-4d49-95d2-acca4c05400a-utilities\") pod \"redhat-marketplace-xmckj\" (UID: \"e3984f87-e746-4d49-95d2-acca4c05400a\") " pod="openshift-marketplace/redhat-marketplace-xmckj" Dec 13 03:15:31 crc kubenswrapper[5070]: I1213 03:15:31.369400 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rd4kt\" (UniqueName: \"kubernetes.io/projected/91cbae40-2fa3-4c39-acd8-43cc2c23d902-kube-api-access-rd4kt\") pod \"redhat-operators-6lzk5\" (UID: \"91cbae40-2fa3-4c39-acd8-43cc2c23d902\") " pod="openshift-marketplace/redhat-operators-6lzk5" Dec 13 03:15:31 crc kubenswrapper[5070]: I1213 03:15:31.370137 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e3984f87-e746-4d49-95d2-acca4c05400a-catalog-content\") pod \"redhat-marketplace-xmckj\" (UID: \"e3984f87-e746-4d49-95d2-acca4c05400a\") " pod="openshift-marketplace/redhat-marketplace-xmckj" Dec 13 03:15:31 crc kubenswrapper[5070]: I1213 03:15:31.370369 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e3984f87-e746-4d49-95d2-acca4c05400a-utilities\") pod \"redhat-marketplace-xmckj\" (UID: 
\"e3984f87-e746-4d49-95d2-acca4c05400a\") " pod="openshift-marketplace/redhat-marketplace-xmckj" Dec 13 03:15:31 crc kubenswrapper[5070]: I1213 03:15:31.389320 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5vf4p\" (UniqueName: \"kubernetes.io/projected/e3984f87-e746-4d49-95d2-acca4c05400a-kube-api-access-5vf4p\") pod \"redhat-marketplace-xmckj\" (UID: \"e3984f87-e746-4d49-95d2-acca4c05400a\") " pod="openshift-marketplace/redhat-marketplace-xmckj" Dec 13 03:15:31 crc kubenswrapper[5070]: I1213 03:15:31.429385 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xmckj" Dec 13 03:15:31 crc kubenswrapper[5070]: I1213 03:15:31.475905 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/91cbae40-2fa3-4c39-acd8-43cc2c23d902-utilities\") pod \"redhat-operators-6lzk5\" (UID: \"91cbae40-2fa3-4c39-acd8-43cc2c23d902\") " pod="openshift-marketplace/redhat-operators-6lzk5" Dec 13 03:15:31 crc kubenswrapper[5070]: I1213 03:15:31.475981 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/91cbae40-2fa3-4c39-acd8-43cc2c23d902-catalog-content\") pod \"redhat-operators-6lzk5\" (UID: \"91cbae40-2fa3-4c39-acd8-43cc2c23d902\") " pod="openshift-marketplace/redhat-operators-6lzk5" Dec 13 03:15:31 crc kubenswrapper[5070]: I1213 03:15:31.476046 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rd4kt\" (UniqueName: \"kubernetes.io/projected/91cbae40-2fa3-4c39-acd8-43cc2c23d902-kube-api-access-rd4kt\") pod \"redhat-operators-6lzk5\" (UID: \"91cbae40-2fa3-4c39-acd8-43cc2c23d902\") " pod="openshift-marketplace/redhat-operators-6lzk5" Dec 13 03:15:31 crc kubenswrapper[5070]: I1213 03:15:31.477384 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/91cbae40-2fa3-4c39-acd8-43cc2c23d902-utilities\") pod \"redhat-operators-6lzk5\" (UID: \"91cbae40-2fa3-4c39-acd8-43cc2c23d902\") " pod="openshift-marketplace/redhat-operators-6lzk5" Dec 13 03:15:31 crc kubenswrapper[5070]: I1213 03:15:31.477392 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/91cbae40-2fa3-4c39-acd8-43cc2c23d902-catalog-content\") pod \"redhat-operators-6lzk5\" (UID: \"91cbae40-2fa3-4c39-acd8-43cc2c23d902\") " pod="openshift-marketplace/redhat-operators-6lzk5" Dec 13 03:15:31 crc kubenswrapper[5070]: I1213 03:15:31.499532 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rd4kt\" (UniqueName: \"kubernetes.io/projected/91cbae40-2fa3-4c39-acd8-43cc2c23d902-kube-api-access-rd4kt\") pod \"redhat-operators-6lzk5\" (UID: \"91cbae40-2fa3-4c39-acd8-43cc2c23d902\") " pod="openshift-marketplace/redhat-operators-6lzk5" Dec 13 03:15:31 crc kubenswrapper[5070]: I1213 03:15:31.598316 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-6lzk5" Dec 13 03:15:32 crc kubenswrapper[5070]: I1213 03:15:32.245539 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-6lzk5"] Dec 13 03:15:32 crc kubenswrapper[5070]: I1213 03:15:32.288065 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-xmckj"] Dec 13 03:15:32 crc kubenswrapper[5070]: W1213 03:15:32.342526 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode3984f87_e746_4d49_95d2_acca4c05400a.slice/crio-a605601e3d795cc24947fde7349d07ee1733300bb969a65e1c7d45100c3bd9da WatchSource:0}: Error finding container a605601e3d795cc24947fde7349d07ee1733300bb969a65e1c7d45100c3bd9da: Status 404 returned error can't find the container with id a605601e3d795cc24947fde7349d07ee1733300bb969a65e1c7d45100c3bd9da Dec 13 03:15:32 crc kubenswrapper[5070]: I1213 03:15:32.607175 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6lzk5" event={"ID":"91cbae40-2fa3-4c39-acd8-43cc2c23d902","Type":"ContainerStarted","Data":"1783d4a255f8d83a796a7ccd5b1503f9e1e425ad503274d4074bf36228dee4e5"} Dec 13 03:15:32 crc kubenswrapper[5070]: I1213 03:15:32.608722 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xmckj" event={"ID":"e3984f87-e746-4d49-95d2-acca4c05400a","Type":"ContainerStarted","Data":"a605601e3d795cc24947fde7349d07ee1733300bb969a65e1c7d45100c3bd9da"} Dec 13 03:15:32 crc kubenswrapper[5070]: I1213 03:15:32.619148 5070 generic.go:334] "Generic (PLEG): container finished" podID="99ef6e31-9820-4f3c-9d75-10e8286eb467" containerID="850d7bc3f2542902cf4a992d1d5b8eef73ae3e1f567c7b678a8904bc85791a59" exitCode=0 Dec 13 03:15:32 crc kubenswrapper[5070]: I1213 03:15:32.619232 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-db5z5" event={"ID":"99ef6e31-9820-4f3c-9d75-10e8286eb467","Type":"ContainerDied","Data":"850d7bc3f2542902cf4a992d1d5b8eef73ae3e1f567c7b678a8904bc85791a59"} Dec 13 03:15:32 crc kubenswrapper[5070]: I1213 03:15:32.626604 5070 generic.go:334] "Generic (PLEG): container finished" podID="6cee5946-a6c3-4d8e-a6f3-7200c45c3a98" containerID="f458775a789f682a1f90da2f4813edc92ff4ea83c2ce8c70d89c546ea1b3fdf0" exitCode=0 Dec 13 03:15:32 crc kubenswrapper[5070]: I1213 03:15:32.626709 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2dbtp" event={"ID":"6cee5946-a6c3-4d8e-a6f3-7200c45c3a98","Type":"ContainerDied","Data":"f458775a789f682a1f90da2f4813edc92ff4ea83c2ce8c70d89c546ea1b3fdf0"} Dec 13 03:15:33 crc kubenswrapper[5070]: I1213 03:15:33.634287 5070 generic.go:334] "Generic (PLEG): container finished" podID="e3984f87-e746-4d49-95d2-acca4c05400a" containerID="54aefd0d64f30047962e908b35c5abcc9711ee1b035e123b70cf2b2930c4e87d" exitCode=0 Dec 13 03:15:33 crc kubenswrapper[5070]: I1213 03:15:33.634470 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xmckj" event={"ID":"e3984f87-e746-4d49-95d2-acca4c05400a","Type":"ContainerDied","Data":"54aefd0d64f30047962e908b35c5abcc9711ee1b035e123b70cf2b2930c4e87d"} Dec 13 03:15:33 crc kubenswrapper[5070]: I1213 03:15:33.637617 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-db5z5" 
event={"ID":"99ef6e31-9820-4f3c-9d75-10e8286eb467","Type":"ContainerStarted","Data":"b13f349e0ce0de8826494b21d87a8a76cf7fefee62616cf6f235364dd56f8811"} Dec 13 03:15:33 crc kubenswrapper[5070]: I1213 03:15:33.639109 5070 generic.go:334] "Generic (PLEG): container finished" podID="91cbae40-2fa3-4c39-acd8-43cc2c23d902" containerID="5bc9436ab5052dc14a00ed04acc026acfe845716bf6f024b783583350ddc2916" exitCode=0 Dec 13 03:15:33 crc kubenswrapper[5070]: I1213 03:15:33.639141 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6lzk5" event={"ID":"91cbae40-2fa3-4c39-acd8-43cc2c23d902","Type":"ContainerDied","Data":"5bc9436ab5052dc14a00ed04acc026acfe845716bf6f024b783583350ddc2916"} Dec 13 03:15:33 crc kubenswrapper[5070]: I1213 03:15:33.677178 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-db5z5" podStartSLOduration=2.917113553 podStartE2EDuration="5.677163237s" podCreationTimestamp="2025-12-13 03:15:28 +0000 UTC" firstStartedPulling="2025-12-13 03:15:30.597068232 +0000 UTC m=+222.832911778" lastFinishedPulling="2025-12-13 03:15:33.357117906 +0000 UTC m=+225.592961462" observedRunningTime="2025-12-13 03:15:33.673382979 +0000 UTC m=+225.909226535" watchObservedRunningTime="2025-12-13 03:15:33.677163237 +0000 UTC m=+225.913006783" Dec 13 03:15:34 crc kubenswrapper[5070]: I1213 03:15:34.648463 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2dbtp" event={"ID":"6cee5946-a6c3-4d8e-a6f3-7200c45c3a98","Type":"ContainerStarted","Data":"f576e349da06043788c9c86e56cbf5f733618f50bc599df58473000f6767d5f0"} Dec 13 03:15:34 crc kubenswrapper[5070]: I1213 03:15:34.669128 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-2dbtp" podStartSLOduration=3.74072262 podStartE2EDuration="6.669075064s" podCreationTimestamp="2025-12-13 03:15:28 +0000 UTC" firstStartedPulling="2025-12-13 03:15:30.598650177 +0000 UTC m=+222.834493723" lastFinishedPulling="2025-12-13 03:15:33.527002621 +0000 UTC m=+225.762846167" observedRunningTime="2025-12-13 03:15:34.66822529 +0000 UTC m=+226.904068846" watchObservedRunningTime="2025-12-13 03:15:34.669075064 +0000 UTC m=+226.904918620" Dec 13 03:15:35 crc kubenswrapper[5070]: I1213 03:15:35.655137 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6lzk5" event={"ID":"91cbae40-2fa3-4c39-acd8-43cc2c23d902","Type":"ContainerStarted","Data":"c7209db28bb13c429b6353365e7d89c27c4ea1440527c2f974e654fde790f045"} Dec 13 03:15:35 crc kubenswrapper[5070]: I1213 03:15:35.656959 5070 generic.go:334] "Generic (PLEG): container finished" podID="e3984f87-e746-4d49-95d2-acca4c05400a" containerID="b4f14060e16a5b376980320b0ef86fb0981b9e043cd59b47147364aad25769ab" exitCode=0 Dec 13 03:15:35 crc kubenswrapper[5070]: I1213 03:15:35.657203 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xmckj" event={"ID":"e3984f87-e746-4d49-95d2-acca4c05400a","Type":"ContainerDied","Data":"b4f14060e16a5b376980320b0ef86fb0981b9e043cd59b47147364aad25769ab"} Dec 13 03:15:36 crc kubenswrapper[5070]: E1213 03:15:36.013163 5070 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: 
[\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod91cbae40_2fa3_4c39_acd8_43cc2c23d902.slice/crio-conmon-c7209db28bb13c429b6353365e7d89c27c4ea1440527c2f974e654fde790f045.scope\": RecentStats: unable to find data in memory cache]" Dec 13 03:15:36 crc kubenswrapper[5070]: I1213 03:15:36.663802 5070 generic.go:334] "Generic (PLEG): container finished" podID="91cbae40-2fa3-4c39-acd8-43cc2c23d902" containerID="c7209db28bb13c429b6353365e7d89c27c4ea1440527c2f974e654fde790f045" exitCode=0 Dec 13 03:15:36 crc kubenswrapper[5070]: I1213 03:15:36.664088 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6lzk5" event={"ID":"91cbae40-2fa3-4c39-acd8-43cc2c23d902","Type":"ContainerDied","Data":"c7209db28bb13c429b6353365e7d89c27c4ea1440527c2f974e654fde790f045"} Dec 13 03:15:36 crc kubenswrapper[5070]: I1213 03:15:36.666626 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xmckj" event={"ID":"e3984f87-e746-4d49-95d2-acca4c05400a","Type":"ContainerStarted","Data":"57c76f4148abbb559f1640fdf883d3e745cc735afb4ccc8fbf8c8f2c2fa36f2f"} Dec 13 03:15:36 crc kubenswrapper[5070]: I1213 03:15:36.699458 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-xmckj" podStartSLOduration=3.242119631 podStartE2EDuration="5.699422175s" podCreationTimestamp="2025-12-13 03:15:31 +0000 UTC" firstStartedPulling="2025-12-13 03:15:33.636680985 +0000 UTC m=+225.872524531" lastFinishedPulling="2025-12-13 03:15:36.093983529 +0000 UTC m=+228.329827075" observedRunningTime="2025-12-13 03:15:36.697898121 +0000 UTC m=+228.933741697" watchObservedRunningTime="2025-12-13 03:15:36.699422175 +0000 UTC m=+228.935265721" Dec 13 03:15:39 crc kubenswrapper[5070]: I1213 03:15:39.013788 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-2dbtp" Dec 13 03:15:39 crc kubenswrapper[5070]: I1213 03:15:39.014206 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-2dbtp" Dec 13 03:15:39 crc kubenswrapper[5070]: I1213 03:15:39.075812 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-2dbtp" Dec 13 03:15:39 crc kubenswrapper[5070]: I1213 03:15:39.201653 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-db5z5" Dec 13 03:15:39 crc kubenswrapper[5070]: I1213 03:15:39.201741 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-db5z5" Dec 13 03:15:39 crc kubenswrapper[5070]: I1213 03:15:39.254680 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-db5z5" Dec 13 03:15:39 crc kubenswrapper[5070]: I1213 03:15:39.716180 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-db5z5" Dec 13 03:15:39 crc kubenswrapper[5070]: I1213 03:15:39.729550 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-2dbtp" Dec 13 03:15:41 crc kubenswrapper[5070]: I1213 03:15:41.430308 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-xmckj" Dec 13 03:15:41 crc kubenswrapper[5070]: I1213 03:15:41.430630 5070 
kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-xmckj" Dec 13 03:15:41 crc kubenswrapper[5070]: I1213 03:15:41.474226 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-xmckj" Dec 13 03:15:41 crc kubenswrapper[5070]: I1213 03:15:41.731181 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-xmckj" Dec 13 03:15:43 crc kubenswrapper[5070]: I1213 03:15:43.713280 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6lzk5" event={"ID":"91cbae40-2fa3-4c39-acd8-43cc2c23d902","Type":"ContainerStarted","Data":"7dede77670651ec076f47163f688ace5aaf56a9636dabb28f729fe7d28d21dd3"} Dec 13 03:15:43 crc kubenswrapper[5070]: I1213 03:15:43.732082 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-6lzk5" podStartSLOduration=3.35523185 podStartE2EDuration="12.732062701s" podCreationTimestamp="2025-12-13 03:15:31 +0000 UTC" firstStartedPulling="2025-12-13 03:15:33.641001947 +0000 UTC m=+225.876845483" lastFinishedPulling="2025-12-13 03:15:43.017832788 +0000 UTC m=+235.253676334" observedRunningTime="2025-12-13 03:15:43.728057157 +0000 UTC m=+235.963900783" watchObservedRunningTime="2025-12-13 03:15:43.732062701 +0000 UTC m=+235.967906257" Dec 13 03:15:51 crc kubenswrapper[5070]: I1213 03:15:51.599379 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-6lzk5" Dec 13 03:15:51 crc kubenswrapper[5070]: I1213 03:15:51.601187 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-6lzk5" Dec 13 03:15:51 crc kubenswrapper[5070]: I1213 03:15:51.637691 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-6lzk5" Dec 13 03:15:51 crc kubenswrapper[5070]: I1213 03:15:51.798017 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-6lzk5" Dec 13 03:15:51 crc kubenswrapper[5070]: I1213 03:15:51.942629 5070 patch_prober.go:28] interesting pod/machine-config-daemon-9l4rb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 13 03:15:51 crc kubenswrapper[5070]: I1213 03:15:51.942679 5070 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 13 03:15:51 crc kubenswrapper[5070]: I1213 03:15:51.942719 5070 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" Dec 13 03:15:51 crc kubenswrapper[5070]: I1213 03:15:51.943204 5070 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"63922f5dc6cb463f08e809ea5fc2f1cd1f0d5a154bfaeed55a81f138c2ba391a"} pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" containerMessage="Container machine-config-daemon failed 
liveness probe, will be restarted" Dec 13 03:15:51 crc kubenswrapper[5070]: I1213 03:15:51.943251 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" containerName="machine-config-daemon" containerID="cri-o://63922f5dc6cb463f08e809ea5fc2f1cd1f0d5a154bfaeed55a81f138c2ba391a" gracePeriod=600 Dec 13 03:15:58 crc kubenswrapper[5070]: I1213 03:15:58.250054 5070 patch_prober.go:28] interesting pod/authentication-operator-69f744f599-k6vpl container/authentication-operator namespace/openshift-authentication-operator: Liveness probe status=failure output="Get \"https://10.217.0.17:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 13 03:15:58 crc kubenswrapper[5070]: I1213 03:15:58.250722 5070 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-authentication-operator/authentication-operator-69f744f599-k6vpl" podUID="bbdcfa81-b48d-4067-af2e-0de54cea8c7e" containerName="authentication-operator" probeResult="failure" output="Get \"https://10.217.0.17:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Dec 13 03:15:59 crc kubenswrapper[5070]: I1213 03:15:59.530671 5070 generic.go:334] "Generic (PLEG): container finished" podID="a2e447c7-5901-414f-af96-69441d4750db" containerID="63922f5dc6cb463f08e809ea5fc2f1cd1f0d5a154bfaeed55a81f138c2ba391a" exitCode=0 Dec 13 03:15:59 crc kubenswrapper[5070]: I1213 03:15:59.530780 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" event={"ID":"a2e447c7-5901-414f-af96-69441d4750db","Type":"ContainerDied","Data":"63922f5dc6cb463f08e809ea5fc2f1cd1f0d5a154bfaeed55a81f138c2ba391a"} Dec 13 03:16:00 crc kubenswrapper[5070]: I1213 03:16:00.537039 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" event={"ID":"a2e447c7-5901-414f-af96-69441d4750db","Type":"ContainerStarted","Data":"4e0db15645022c250ea5ee329cdc3679d197f7e9c4f22c7d342a54394aac732f"} Dec 13 03:16:21 crc kubenswrapper[5070]: I1213 03:16:21.482625 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-c476db947-lp7jr"] Dec 13 03:16:21 crc kubenswrapper[5070]: I1213 03:16:21.483391 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-c476db947-lp7jr" podUID="340a012b-be3a-4c31-83c3-f5d127cb5f19" containerName="controller-manager" containerID="cri-o://739d6f8a771dc9373ebc4499443ccbc471486bf23b0e616c847ae4399444845c" gracePeriod=30 Dec 13 03:16:21 crc kubenswrapper[5070]: I1213 03:16:21.650931 5070 generic.go:334] "Generic (PLEG): container finished" podID="340a012b-be3a-4c31-83c3-f5d127cb5f19" containerID="739d6f8a771dc9373ebc4499443ccbc471486bf23b0e616c847ae4399444845c" exitCode=0 Dec 13 03:16:21 crc kubenswrapper[5070]: I1213 03:16:21.651014 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-c476db947-lp7jr" event={"ID":"340a012b-be3a-4c31-83c3-f5d127cb5f19","Type":"ContainerDied","Data":"739d6f8a771dc9373ebc4499443ccbc471486bf23b0e616c847ae4399444845c"} Dec 13 03:16:21 crc kubenswrapper[5070]: I1213 03:16:21.880151 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-c476db947-lp7jr" Dec 13 03:16:22 crc kubenswrapper[5070]: I1213 03:16:22.034017 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/340a012b-be3a-4c31-83c3-f5d127cb5f19-client-ca\") pod \"340a012b-be3a-4c31-83c3-f5d127cb5f19\" (UID: \"340a012b-be3a-4c31-83c3-f5d127cb5f19\") " Dec 13 03:16:22 crc kubenswrapper[5070]: I1213 03:16:22.034086 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/340a012b-be3a-4c31-83c3-f5d127cb5f19-config\") pod \"340a012b-be3a-4c31-83c3-f5d127cb5f19\" (UID: \"340a012b-be3a-4c31-83c3-f5d127cb5f19\") " Dec 13 03:16:22 crc kubenswrapper[5070]: I1213 03:16:22.034118 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/340a012b-be3a-4c31-83c3-f5d127cb5f19-serving-cert\") pod \"340a012b-be3a-4c31-83c3-f5d127cb5f19\" (UID: \"340a012b-be3a-4c31-83c3-f5d127cb5f19\") " Dec 13 03:16:22 crc kubenswrapper[5070]: I1213 03:16:22.034189 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/340a012b-be3a-4c31-83c3-f5d127cb5f19-proxy-ca-bundles\") pod \"340a012b-be3a-4c31-83c3-f5d127cb5f19\" (UID: \"340a012b-be3a-4c31-83c3-f5d127cb5f19\") " Dec 13 03:16:22 crc kubenswrapper[5070]: I1213 03:16:22.034215 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7q7g9\" (UniqueName: \"kubernetes.io/projected/340a012b-be3a-4c31-83c3-f5d127cb5f19-kube-api-access-7q7g9\") pod \"340a012b-be3a-4c31-83c3-f5d127cb5f19\" (UID: \"340a012b-be3a-4c31-83c3-f5d127cb5f19\") " Dec 13 03:16:22 crc kubenswrapper[5070]: I1213 03:16:22.035042 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/340a012b-be3a-4c31-83c3-f5d127cb5f19-client-ca" (OuterVolumeSpecName: "client-ca") pod "340a012b-be3a-4c31-83c3-f5d127cb5f19" (UID: "340a012b-be3a-4c31-83c3-f5d127cb5f19"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:16:22 crc kubenswrapper[5070]: I1213 03:16:22.035063 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/340a012b-be3a-4c31-83c3-f5d127cb5f19-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "340a012b-be3a-4c31-83c3-f5d127cb5f19" (UID: "340a012b-be3a-4c31-83c3-f5d127cb5f19"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:16:22 crc kubenswrapper[5070]: I1213 03:16:22.035120 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/340a012b-be3a-4c31-83c3-f5d127cb5f19-config" (OuterVolumeSpecName: "config") pod "340a012b-be3a-4c31-83c3-f5d127cb5f19" (UID: "340a012b-be3a-4c31-83c3-f5d127cb5f19"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:16:22 crc kubenswrapper[5070]: I1213 03:16:22.039343 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/340a012b-be3a-4c31-83c3-f5d127cb5f19-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "340a012b-be3a-4c31-83c3-f5d127cb5f19" (UID: "340a012b-be3a-4c31-83c3-f5d127cb5f19"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:16:22 crc kubenswrapper[5070]: I1213 03:16:22.040032 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/340a012b-be3a-4c31-83c3-f5d127cb5f19-kube-api-access-7q7g9" (OuterVolumeSpecName: "kube-api-access-7q7g9") pod "340a012b-be3a-4c31-83c3-f5d127cb5f19" (UID: "340a012b-be3a-4c31-83c3-f5d127cb5f19"). InnerVolumeSpecName "kube-api-access-7q7g9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:16:22 crc kubenswrapper[5070]: I1213 03:16:22.136099 5070 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/340a012b-be3a-4c31-83c3-f5d127cb5f19-client-ca\") on node \"crc\" DevicePath \"\"" Dec 13 03:16:22 crc kubenswrapper[5070]: I1213 03:16:22.136155 5070 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/340a012b-be3a-4c31-83c3-f5d127cb5f19-config\") on node \"crc\" DevicePath \"\"" Dec 13 03:16:22 crc kubenswrapper[5070]: I1213 03:16:22.136173 5070 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/340a012b-be3a-4c31-83c3-f5d127cb5f19-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 13 03:16:22 crc kubenswrapper[5070]: I1213 03:16:22.136191 5070 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/340a012b-be3a-4c31-83c3-f5d127cb5f19-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 13 03:16:22 crc kubenswrapper[5070]: I1213 03:16:22.136210 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7q7g9\" (UniqueName: \"kubernetes.io/projected/340a012b-be3a-4c31-83c3-f5d127cb5f19-kube-api-access-7q7g9\") on node \"crc\" DevicePath \"\"" Dec 13 03:16:22 crc kubenswrapper[5070]: I1213 03:16:22.657219 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-c476db947-lp7jr" Dec 13 03:16:22 crc kubenswrapper[5070]: I1213 03:16:22.657220 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-c476db947-lp7jr" event={"ID":"340a012b-be3a-4c31-83c3-f5d127cb5f19","Type":"ContainerDied","Data":"2e99f5a6a76e12ee52dc88dab846c6656e8b1ec79381a4fbbc3f1d386bd57281"} Dec 13 03:16:22 crc kubenswrapper[5070]: I1213 03:16:22.657350 5070 scope.go:117] "RemoveContainer" containerID="739d6f8a771dc9373ebc4499443ccbc471486bf23b0e616c847ae4399444845c" Dec 13 03:16:22 crc kubenswrapper[5070]: I1213 03:16:22.674854 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-5bf54df75-jf7bq"] Dec 13 03:16:22 crc kubenswrapper[5070]: E1213 03:16:22.675163 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="340a012b-be3a-4c31-83c3-f5d127cb5f19" containerName="controller-manager" Dec 13 03:16:22 crc kubenswrapper[5070]: I1213 03:16:22.675178 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="340a012b-be3a-4c31-83c3-f5d127cb5f19" containerName="controller-manager" Dec 13 03:16:22 crc kubenswrapper[5070]: I1213 03:16:22.675323 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="340a012b-be3a-4c31-83c3-f5d127cb5f19" containerName="controller-manager" Dec 13 03:16:22 crc kubenswrapper[5070]: I1213 03:16:22.675923 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-5bf54df75-jf7bq" Dec 13 03:16:22 crc kubenswrapper[5070]: I1213 03:16:22.679140 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 13 03:16:22 crc kubenswrapper[5070]: I1213 03:16:22.679954 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 13 03:16:22 crc kubenswrapper[5070]: I1213 03:16:22.680167 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 13 03:16:22 crc kubenswrapper[5070]: I1213 03:16:22.680290 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 13 03:16:22 crc kubenswrapper[5070]: I1213 03:16:22.681165 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 13 03:16:22 crc kubenswrapper[5070]: I1213 03:16:22.681282 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 13 03:16:22 crc kubenswrapper[5070]: I1213 03:16:22.682505 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-c476db947-lp7jr"] Dec 13 03:16:22 crc kubenswrapper[5070]: I1213 03:16:22.684806 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 13 03:16:22 crc kubenswrapper[5070]: I1213 03:16:22.684975 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-c476db947-lp7jr"] Dec 13 03:16:22 crc kubenswrapper[5070]: I1213 03:16:22.689002 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-5bf54df75-jf7bq"] Dec 13 03:16:22 crc kubenswrapper[5070]: I1213 03:16:22.845712 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/f1a1f9cb-98d8-45f4-8a99-951e4237452c-proxy-ca-bundles\") pod \"controller-manager-5bf54df75-jf7bq\" (UID: \"f1a1f9cb-98d8-45f4-8a99-951e4237452c\") " pod="openshift-controller-manager/controller-manager-5bf54df75-jf7bq" Dec 13 03:16:22 crc kubenswrapper[5070]: I1213 03:16:22.845859 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f1a1f9cb-98d8-45f4-8a99-951e4237452c-client-ca\") pod \"controller-manager-5bf54df75-jf7bq\" (UID: \"f1a1f9cb-98d8-45f4-8a99-951e4237452c\") " pod="openshift-controller-manager/controller-manager-5bf54df75-jf7bq" Dec 13 03:16:22 crc kubenswrapper[5070]: I1213 03:16:22.845903 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f1a1f9cb-98d8-45f4-8a99-951e4237452c-config\") pod \"controller-manager-5bf54df75-jf7bq\" (UID: \"f1a1f9cb-98d8-45f4-8a99-951e4237452c\") " pod="openshift-controller-manager/controller-manager-5bf54df75-jf7bq" Dec 13 03:16:22 crc kubenswrapper[5070]: I1213 03:16:22.845950 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qdntl\" (UniqueName: \"kubernetes.io/projected/f1a1f9cb-98d8-45f4-8a99-951e4237452c-kube-api-access-qdntl\") pod 
\"controller-manager-5bf54df75-jf7bq\" (UID: \"f1a1f9cb-98d8-45f4-8a99-951e4237452c\") " pod="openshift-controller-manager/controller-manager-5bf54df75-jf7bq" Dec 13 03:16:22 crc kubenswrapper[5070]: I1213 03:16:22.845977 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f1a1f9cb-98d8-45f4-8a99-951e4237452c-serving-cert\") pod \"controller-manager-5bf54df75-jf7bq\" (UID: \"f1a1f9cb-98d8-45f4-8a99-951e4237452c\") " pod="openshift-controller-manager/controller-manager-5bf54df75-jf7bq" Dec 13 03:16:22 crc kubenswrapper[5070]: I1213 03:16:22.947106 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/f1a1f9cb-98d8-45f4-8a99-951e4237452c-proxy-ca-bundles\") pod \"controller-manager-5bf54df75-jf7bq\" (UID: \"f1a1f9cb-98d8-45f4-8a99-951e4237452c\") " pod="openshift-controller-manager/controller-manager-5bf54df75-jf7bq" Dec 13 03:16:22 crc kubenswrapper[5070]: I1213 03:16:22.947198 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f1a1f9cb-98d8-45f4-8a99-951e4237452c-client-ca\") pod \"controller-manager-5bf54df75-jf7bq\" (UID: \"f1a1f9cb-98d8-45f4-8a99-951e4237452c\") " pod="openshift-controller-manager/controller-manager-5bf54df75-jf7bq" Dec 13 03:16:22 crc kubenswrapper[5070]: I1213 03:16:22.947230 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f1a1f9cb-98d8-45f4-8a99-951e4237452c-config\") pod \"controller-manager-5bf54df75-jf7bq\" (UID: \"f1a1f9cb-98d8-45f4-8a99-951e4237452c\") " pod="openshift-controller-manager/controller-manager-5bf54df75-jf7bq" Dec 13 03:16:22 crc kubenswrapper[5070]: I1213 03:16:22.947254 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qdntl\" (UniqueName: \"kubernetes.io/projected/f1a1f9cb-98d8-45f4-8a99-951e4237452c-kube-api-access-qdntl\") pod \"controller-manager-5bf54df75-jf7bq\" (UID: \"f1a1f9cb-98d8-45f4-8a99-951e4237452c\") " pod="openshift-controller-manager/controller-manager-5bf54df75-jf7bq" Dec 13 03:16:22 crc kubenswrapper[5070]: I1213 03:16:22.947279 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f1a1f9cb-98d8-45f4-8a99-951e4237452c-serving-cert\") pod \"controller-manager-5bf54df75-jf7bq\" (UID: \"f1a1f9cb-98d8-45f4-8a99-951e4237452c\") " pod="openshift-controller-manager/controller-manager-5bf54df75-jf7bq" Dec 13 03:16:22 crc kubenswrapper[5070]: I1213 03:16:22.948130 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f1a1f9cb-98d8-45f4-8a99-951e4237452c-client-ca\") pod \"controller-manager-5bf54df75-jf7bq\" (UID: \"f1a1f9cb-98d8-45f4-8a99-951e4237452c\") " pod="openshift-controller-manager/controller-manager-5bf54df75-jf7bq" Dec 13 03:16:22 crc kubenswrapper[5070]: I1213 03:16:22.948535 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f1a1f9cb-98d8-45f4-8a99-951e4237452c-config\") pod \"controller-manager-5bf54df75-jf7bq\" (UID: \"f1a1f9cb-98d8-45f4-8a99-951e4237452c\") " pod="openshift-controller-manager/controller-manager-5bf54df75-jf7bq" Dec 13 03:16:22 crc kubenswrapper[5070]: I1213 03:16:22.948684 5070 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/f1a1f9cb-98d8-45f4-8a99-951e4237452c-proxy-ca-bundles\") pod \"controller-manager-5bf54df75-jf7bq\" (UID: \"f1a1f9cb-98d8-45f4-8a99-951e4237452c\") " pod="openshift-controller-manager/controller-manager-5bf54df75-jf7bq" Dec 13 03:16:22 crc kubenswrapper[5070]: I1213 03:16:22.962071 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f1a1f9cb-98d8-45f4-8a99-951e4237452c-serving-cert\") pod \"controller-manager-5bf54df75-jf7bq\" (UID: \"f1a1f9cb-98d8-45f4-8a99-951e4237452c\") " pod="openshift-controller-manager/controller-manager-5bf54df75-jf7bq" Dec 13 03:16:22 crc kubenswrapper[5070]: I1213 03:16:22.965581 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qdntl\" (UniqueName: \"kubernetes.io/projected/f1a1f9cb-98d8-45f4-8a99-951e4237452c-kube-api-access-qdntl\") pod \"controller-manager-5bf54df75-jf7bq\" (UID: \"f1a1f9cb-98d8-45f4-8a99-951e4237452c\") " pod="openshift-controller-manager/controller-manager-5bf54df75-jf7bq" Dec 13 03:16:23 crc kubenswrapper[5070]: I1213 03:16:23.006045 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-5bf54df75-jf7bq" Dec 13 03:16:23 crc kubenswrapper[5070]: I1213 03:16:23.203127 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-5bf54df75-jf7bq"] Dec 13 03:16:23 crc kubenswrapper[5070]: I1213 03:16:23.672617 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-5bf54df75-jf7bq" event={"ID":"f1a1f9cb-98d8-45f4-8a99-951e4237452c","Type":"ContainerStarted","Data":"dd382dc100b1c4ce2bef81ce7c0fa7203d4baf86c909aa0087c25feb5c950a09"} Dec 13 03:16:23 crc kubenswrapper[5070]: I1213 03:16:23.672960 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-5bf54df75-jf7bq" event={"ID":"f1a1f9cb-98d8-45f4-8a99-951e4237452c","Type":"ContainerStarted","Data":"cc778961335578c3ee360239b5f42377fbf93fc78ec344ccb689010342ca0615"} Dec 13 03:16:23 crc kubenswrapper[5070]: I1213 03:16:23.673620 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-5bf54df75-jf7bq" Dec 13 03:16:23 crc kubenswrapper[5070]: I1213 03:16:23.682702 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-5bf54df75-jf7bq" Dec 13 03:16:23 crc kubenswrapper[5070]: I1213 03:16:23.697478 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-5bf54df75-jf7bq" podStartSLOduration=2.6974573619999997 podStartE2EDuration="2.697457362s" podCreationTimestamp="2025-12-13 03:16:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 03:16:23.693519832 +0000 UTC m=+275.929363378" watchObservedRunningTime="2025-12-13 03:16:23.697457362 +0000 UTC m=+275.933300908" Dec 13 03:16:24 crc kubenswrapper[5070]: I1213 03:16:24.174347 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="340a012b-be3a-4c31-83c3-f5d127cb5f19" path="/var/lib/kubelet/pods/340a012b-be3a-4c31-83c3-f5d127cb5f19/volumes" Dec 13 03:16:24 crc 
kubenswrapper[5070]: I1213 03:16:24.977934 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-hn8tq"] Dec 13 03:16:24 crc kubenswrapper[5070]: I1213 03:16:24.978581 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-hn8tq" Dec 13 03:16:25 crc kubenswrapper[5070]: I1213 03:16:25.000182 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-hn8tq"] Dec 13 03:16:25 crc kubenswrapper[5070]: I1213 03:16:25.171081 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/ccba948a-64ea-4632-992d-4832f664ae45-registry-certificates\") pod \"image-registry-66df7c8f76-hn8tq\" (UID: \"ccba948a-64ea-4632-992d-4832f664ae45\") " pod="openshift-image-registry/image-registry-66df7c8f76-hn8tq" Dec 13 03:16:25 crc kubenswrapper[5070]: I1213 03:16:25.171297 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/ccba948a-64ea-4632-992d-4832f664ae45-installation-pull-secrets\") pod \"image-registry-66df7c8f76-hn8tq\" (UID: \"ccba948a-64ea-4632-992d-4832f664ae45\") " pod="openshift-image-registry/image-registry-66df7c8f76-hn8tq" Dec 13 03:16:25 crc kubenswrapper[5070]: I1213 03:16:25.171387 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6crc5\" (UniqueName: \"kubernetes.io/projected/ccba948a-64ea-4632-992d-4832f664ae45-kube-api-access-6crc5\") pod \"image-registry-66df7c8f76-hn8tq\" (UID: \"ccba948a-64ea-4632-992d-4832f664ae45\") " pod="openshift-image-registry/image-registry-66df7c8f76-hn8tq" Dec 13 03:16:25 crc kubenswrapper[5070]: I1213 03:16:25.171434 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/ccba948a-64ea-4632-992d-4832f664ae45-bound-sa-token\") pod \"image-registry-66df7c8f76-hn8tq\" (UID: \"ccba948a-64ea-4632-992d-4832f664ae45\") " pod="openshift-image-registry/image-registry-66df7c8f76-hn8tq" Dec 13 03:16:25 crc kubenswrapper[5070]: I1213 03:16:25.171542 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/ccba948a-64ea-4632-992d-4832f664ae45-ca-trust-extracted\") pod \"image-registry-66df7c8f76-hn8tq\" (UID: \"ccba948a-64ea-4632-992d-4832f664ae45\") " pod="openshift-image-registry/image-registry-66df7c8f76-hn8tq" Dec 13 03:16:25 crc kubenswrapper[5070]: I1213 03:16:25.171683 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-hn8tq\" (UID: \"ccba948a-64ea-4632-992d-4832f664ae45\") " pod="openshift-image-registry/image-registry-66df7c8f76-hn8tq" Dec 13 03:16:25 crc kubenswrapper[5070]: I1213 03:16:25.171808 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ccba948a-64ea-4632-992d-4832f664ae45-trusted-ca\") pod \"image-registry-66df7c8f76-hn8tq\" (UID: \"ccba948a-64ea-4632-992d-4832f664ae45\") " 
pod="openshift-image-registry/image-registry-66df7c8f76-hn8tq" Dec 13 03:16:25 crc kubenswrapper[5070]: I1213 03:16:25.171942 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/ccba948a-64ea-4632-992d-4832f664ae45-registry-tls\") pod \"image-registry-66df7c8f76-hn8tq\" (UID: \"ccba948a-64ea-4632-992d-4832f664ae45\") " pod="openshift-image-registry/image-registry-66df7c8f76-hn8tq" Dec 13 03:16:25 crc kubenswrapper[5070]: I1213 03:16:25.196225 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-hn8tq\" (UID: \"ccba948a-64ea-4632-992d-4832f664ae45\") " pod="openshift-image-registry/image-registry-66df7c8f76-hn8tq" Dec 13 03:16:25 crc kubenswrapper[5070]: I1213 03:16:25.273204 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/ccba948a-64ea-4632-992d-4832f664ae45-registry-certificates\") pod \"image-registry-66df7c8f76-hn8tq\" (UID: \"ccba948a-64ea-4632-992d-4832f664ae45\") " pod="openshift-image-registry/image-registry-66df7c8f76-hn8tq" Dec 13 03:16:25 crc kubenswrapper[5070]: I1213 03:16:25.273294 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/ccba948a-64ea-4632-992d-4832f664ae45-installation-pull-secrets\") pod \"image-registry-66df7c8f76-hn8tq\" (UID: \"ccba948a-64ea-4632-992d-4832f664ae45\") " pod="openshift-image-registry/image-registry-66df7c8f76-hn8tq" Dec 13 03:16:25 crc kubenswrapper[5070]: I1213 03:16:25.273316 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6crc5\" (UniqueName: \"kubernetes.io/projected/ccba948a-64ea-4632-992d-4832f664ae45-kube-api-access-6crc5\") pod \"image-registry-66df7c8f76-hn8tq\" (UID: \"ccba948a-64ea-4632-992d-4832f664ae45\") " pod="openshift-image-registry/image-registry-66df7c8f76-hn8tq" Dec 13 03:16:25 crc kubenswrapper[5070]: I1213 03:16:25.273350 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/ccba948a-64ea-4632-992d-4832f664ae45-bound-sa-token\") pod \"image-registry-66df7c8f76-hn8tq\" (UID: \"ccba948a-64ea-4632-992d-4832f664ae45\") " pod="openshift-image-registry/image-registry-66df7c8f76-hn8tq" Dec 13 03:16:25 crc kubenswrapper[5070]: I1213 03:16:25.273406 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/ccba948a-64ea-4632-992d-4832f664ae45-ca-trust-extracted\") pod \"image-registry-66df7c8f76-hn8tq\" (UID: \"ccba948a-64ea-4632-992d-4832f664ae45\") " pod="openshift-image-registry/image-registry-66df7c8f76-hn8tq" Dec 13 03:16:25 crc kubenswrapper[5070]: I1213 03:16:25.273513 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ccba948a-64ea-4632-992d-4832f664ae45-trusted-ca\") pod \"image-registry-66df7c8f76-hn8tq\" (UID: \"ccba948a-64ea-4632-992d-4832f664ae45\") " pod="openshift-image-registry/image-registry-66df7c8f76-hn8tq" Dec 13 03:16:25 crc kubenswrapper[5070]: I1213 03:16:25.273564 5070 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/ccba948a-64ea-4632-992d-4832f664ae45-registry-tls\") pod \"image-registry-66df7c8f76-hn8tq\" (UID: \"ccba948a-64ea-4632-992d-4832f664ae45\") " pod="openshift-image-registry/image-registry-66df7c8f76-hn8tq" Dec 13 03:16:25 crc kubenswrapper[5070]: I1213 03:16:25.275247 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/ccba948a-64ea-4632-992d-4832f664ae45-ca-trust-extracted\") pod \"image-registry-66df7c8f76-hn8tq\" (UID: \"ccba948a-64ea-4632-992d-4832f664ae45\") " pod="openshift-image-registry/image-registry-66df7c8f76-hn8tq" Dec 13 03:16:25 crc kubenswrapper[5070]: I1213 03:16:25.276745 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ccba948a-64ea-4632-992d-4832f664ae45-trusted-ca\") pod \"image-registry-66df7c8f76-hn8tq\" (UID: \"ccba948a-64ea-4632-992d-4832f664ae45\") " pod="openshift-image-registry/image-registry-66df7c8f76-hn8tq" Dec 13 03:16:25 crc kubenswrapper[5070]: I1213 03:16:25.278271 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/ccba948a-64ea-4632-992d-4832f664ae45-registry-certificates\") pod \"image-registry-66df7c8f76-hn8tq\" (UID: \"ccba948a-64ea-4632-992d-4832f664ae45\") " pod="openshift-image-registry/image-registry-66df7c8f76-hn8tq" Dec 13 03:16:25 crc kubenswrapper[5070]: I1213 03:16:25.282636 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/ccba948a-64ea-4632-992d-4832f664ae45-registry-tls\") pod \"image-registry-66df7c8f76-hn8tq\" (UID: \"ccba948a-64ea-4632-992d-4832f664ae45\") " pod="openshift-image-registry/image-registry-66df7c8f76-hn8tq" Dec 13 03:16:25 crc kubenswrapper[5070]: I1213 03:16:25.282727 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/ccba948a-64ea-4632-992d-4832f664ae45-installation-pull-secrets\") pod \"image-registry-66df7c8f76-hn8tq\" (UID: \"ccba948a-64ea-4632-992d-4832f664ae45\") " pod="openshift-image-registry/image-registry-66df7c8f76-hn8tq" Dec 13 03:16:25 crc kubenswrapper[5070]: I1213 03:16:25.302752 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/ccba948a-64ea-4632-992d-4832f664ae45-bound-sa-token\") pod \"image-registry-66df7c8f76-hn8tq\" (UID: \"ccba948a-64ea-4632-992d-4832f664ae45\") " pod="openshift-image-registry/image-registry-66df7c8f76-hn8tq" Dec 13 03:16:25 crc kubenswrapper[5070]: I1213 03:16:25.302920 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6crc5\" (UniqueName: \"kubernetes.io/projected/ccba948a-64ea-4632-992d-4832f664ae45-kube-api-access-6crc5\") pod \"image-registry-66df7c8f76-hn8tq\" (UID: \"ccba948a-64ea-4632-992d-4832f664ae45\") " pod="openshift-image-registry/image-registry-66df7c8f76-hn8tq" Dec 13 03:16:25 crc kubenswrapper[5070]: I1213 03:16:25.601919 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-hn8tq" Dec 13 03:16:26 crc kubenswrapper[5070]: I1213 03:16:26.004389 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-hn8tq"] Dec 13 03:16:26 crc kubenswrapper[5070]: W1213 03:16:26.012122 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podccba948a_64ea_4632_992d_4832f664ae45.slice/crio-e97019995e0854ee5fbe69032a6780c6bbcd71315cbf3b7347748e10b409176f WatchSource:0}: Error finding container e97019995e0854ee5fbe69032a6780c6bbcd71315cbf3b7347748e10b409176f: Status 404 returned error can't find the container with id e97019995e0854ee5fbe69032a6780c6bbcd71315cbf3b7347748e10b409176f Dec 13 03:16:26 crc kubenswrapper[5070]: I1213 03:16:26.695999 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-hn8tq" event={"ID":"ccba948a-64ea-4632-992d-4832f664ae45","Type":"ContainerStarted","Data":"4a5547e239ff20ef402c6dc01943745d3f6e251b5b17e444350dcd74551bdbdb"} Dec 13 03:16:26 crc kubenswrapper[5070]: I1213 03:16:26.696552 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-hn8tq" Dec 13 03:16:26 crc kubenswrapper[5070]: I1213 03:16:26.696638 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-hn8tq" event={"ID":"ccba948a-64ea-4632-992d-4832f664ae45","Type":"ContainerStarted","Data":"e97019995e0854ee5fbe69032a6780c6bbcd71315cbf3b7347748e10b409176f"} Dec 13 03:16:26 crc kubenswrapper[5070]: I1213 03:16:26.716885 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-hn8tq" podStartSLOduration=2.71686885 podStartE2EDuration="2.71686885s" podCreationTimestamp="2025-12-13 03:16:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 03:16:26.713421754 +0000 UTC m=+278.949265310" watchObservedRunningTime="2025-12-13 03:16:26.71686885 +0000 UTC m=+278.952712386" Dec 13 03:16:45 crc kubenswrapper[5070]: I1213 03:16:45.607403 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-hn8tq" Dec 13 03:16:45 crc kubenswrapper[5070]: I1213 03:16:45.662311 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-xkp8n"] Dec 13 03:17:10 crc kubenswrapper[5070]: I1213 03:17:10.700514 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-xkp8n" podUID="472601ba-cee2-4f6e-ac53-a5606ef0469f" containerName="registry" containerID="cri-o://72f7f5fc3896b38a456efbaf34e0d33b80ea467dddb8f144d01bff0876327ec6" gracePeriod=30 Dec 13 03:17:10 crc kubenswrapper[5070]: I1213 03:17:10.958184 5070 generic.go:334] "Generic (PLEG): container finished" podID="472601ba-cee2-4f6e-ac53-a5606ef0469f" containerID="72f7f5fc3896b38a456efbaf34e0d33b80ea467dddb8f144d01bff0876327ec6" exitCode=0 Dec 13 03:17:10 crc kubenswrapper[5070]: I1213 03:17:10.958568 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-xkp8n" 
event={"ID":"472601ba-cee2-4f6e-ac53-a5606ef0469f","Type":"ContainerDied","Data":"72f7f5fc3896b38a456efbaf34e0d33b80ea467dddb8f144d01bff0876327ec6"} Dec 13 03:17:11 crc kubenswrapper[5070]: I1213 03:17:11.069844 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-xkp8n" Dec 13 03:17:11 crc kubenswrapper[5070]: I1213 03:17:11.212928 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/472601ba-cee2-4f6e-ac53-a5606ef0469f-bound-sa-token\") pod \"472601ba-cee2-4f6e-ac53-a5606ef0469f\" (UID: \"472601ba-cee2-4f6e-ac53-a5606ef0469f\") " Dec 13 03:17:11 crc kubenswrapper[5070]: I1213 03:17:11.213006 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q7tpn\" (UniqueName: \"kubernetes.io/projected/472601ba-cee2-4f6e-ac53-a5606ef0469f-kube-api-access-q7tpn\") pod \"472601ba-cee2-4f6e-ac53-a5606ef0469f\" (UID: \"472601ba-cee2-4f6e-ac53-a5606ef0469f\") " Dec 13 03:17:11 crc kubenswrapper[5070]: I1213 03:17:11.213046 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/472601ba-cee2-4f6e-ac53-a5606ef0469f-installation-pull-secrets\") pod \"472601ba-cee2-4f6e-ac53-a5606ef0469f\" (UID: \"472601ba-cee2-4f6e-ac53-a5606ef0469f\") " Dec 13 03:17:11 crc kubenswrapper[5070]: I1213 03:17:11.213085 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/472601ba-cee2-4f6e-ac53-a5606ef0469f-registry-tls\") pod \"472601ba-cee2-4f6e-ac53-a5606ef0469f\" (UID: \"472601ba-cee2-4f6e-ac53-a5606ef0469f\") " Dec 13 03:17:11 crc kubenswrapper[5070]: I1213 03:17:11.213238 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"472601ba-cee2-4f6e-ac53-a5606ef0469f\" (UID: \"472601ba-cee2-4f6e-ac53-a5606ef0469f\") " Dec 13 03:17:11 crc kubenswrapper[5070]: I1213 03:17:11.213268 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/472601ba-cee2-4f6e-ac53-a5606ef0469f-trusted-ca\") pod \"472601ba-cee2-4f6e-ac53-a5606ef0469f\" (UID: \"472601ba-cee2-4f6e-ac53-a5606ef0469f\") " Dec 13 03:17:11 crc kubenswrapper[5070]: I1213 03:17:11.213333 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/472601ba-cee2-4f6e-ac53-a5606ef0469f-ca-trust-extracted\") pod \"472601ba-cee2-4f6e-ac53-a5606ef0469f\" (UID: \"472601ba-cee2-4f6e-ac53-a5606ef0469f\") " Dec 13 03:17:11 crc kubenswrapper[5070]: I1213 03:17:11.213366 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/472601ba-cee2-4f6e-ac53-a5606ef0469f-registry-certificates\") pod \"472601ba-cee2-4f6e-ac53-a5606ef0469f\" (UID: \"472601ba-cee2-4f6e-ac53-a5606ef0469f\") " Dec 13 03:17:11 crc kubenswrapper[5070]: I1213 03:17:11.214174 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/472601ba-cee2-4f6e-ac53-a5606ef0469f-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "472601ba-cee2-4f6e-ac53-a5606ef0469f" (UID: 
"472601ba-cee2-4f6e-ac53-a5606ef0469f"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:17:11 crc kubenswrapper[5070]: I1213 03:17:11.214239 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/472601ba-cee2-4f6e-ac53-a5606ef0469f-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "472601ba-cee2-4f6e-ac53-a5606ef0469f" (UID: "472601ba-cee2-4f6e-ac53-a5606ef0469f"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:17:11 crc kubenswrapper[5070]: I1213 03:17:11.219818 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/472601ba-cee2-4f6e-ac53-a5606ef0469f-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "472601ba-cee2-4f6e-ac53-a5606ef0469f" (UID: "472601ba-cee2-4f6e-ac53-a5606ef0469f"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:17:11 crc kubenswrapper[5070]: I1213 03:17:11.221052 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/472601ba-cee2-4f6e-ac53-a5606ef0469f-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "472601ba-cee2-4f6e-ac53-a5606ef0469f" (UID: "472601ba-cee2-4f6e-ac53-a5606ef0469f"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:17:11 crc kubenswrapper[5070]: I1213 03:17:11.223399 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "472601ba-cee2-4f6e-ac53-a5606ef0469f" (UID: "472601ba-cee2-4f6e-ac53-a5606ef0469f"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 13 03:17:11 crc kubenswrapper[5070]: I1213 03:17:11.231144 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/472601ba-cee2-4f6e-ac53-a5606ef0469f-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "472601ba-cee2-4f6e-ac53-a5606ef0469f" (UID: "472601ba-cee2-4f6e-ac53-a5606ef0469f"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 03:17:11 crc kubenswrapper[5070]: I1213 03:17:11.236177 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/472601ba-cee2-4f6e-ac53-a5606ef0469f-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "472601ba-cee2-4f6e-ac53-a5606ef0469f" (UID: "472601ba-cee2-4f6e-ac53-a5606ef0469f"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:17:11 crc kubenswrapper[5070]: I1213 03:17:11.237094 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/472601ba-cee2-4f6e-ac53-a5606ef0469f-kube-api-access-q7tpn" (OuterVolumeSpecName: "kube-api-access-q7tpn") pod "472601ba-cee2-4f6e-ac53-a5606ef0469f" (UID: "472601ba-cee2-4f6e-ac53-a5606ef0469f"). InnerVolumeSpecName "kube-api-access-q7tpn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:17:11 crc kubenswrapper[5070]: I1213 03:17:11.315485 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q7tpn\" (UniqueName: \"kubernetes.io/projected/472601ba-cee2-4f6e-ac53-a5606ef0469f-kube-api-access-q7tpn\") on node \"crc\" DevicePath \"\"" Dec 13 03:17:11 crc kubenswrapper[5070]: I1213 03:17:11.315530 5070 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/472601ba-cee2-4f6e-ac53-a5606ef0469f-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Dec 13 03:17:11 crc kubenswrapper[5070]: I1213 03:17:11.315545 5070 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/472601ba-cee2-4f6e-ac53-a5606ef0469f-registry-tls\") on node \"crc\" DevicePath \"\"" Dec 13 03:17:11 crc kubenswrapper[5070]: I1213 03:17:11.315557 5070 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/472601ba-cee2-4f6e-ac53-a5606ef0469f-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 13 03:17:11 crc kubenswrapper[5070]: I1213 03:17:11.315570 5070 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/472601ba-cee2-4f6e-ac53-a5606ef0469f-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Dec 13 03:17:11 crc kubenswrapper[5070]: I1213 03:17:11.315578 5070 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/472601ba-cee2-4f6e-ac53-a5606ef0469f-registry-certificates\") on node \"crc\" DevicePath \"\"" Dec 13 03:17:11 crc kubenswrapper[5070]: I1213 03:17:11.315586 5070 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/472601ba-cee2-4f6e-ac53-a5606ef0469f-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 13 03:17:11 crc kubenswrapper[5070]: I1213 03:17:11.967801 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-xkp8n" event={"ID":"472601ba-cee2-4f6e-ac53-a5606ef0469f","Type":"ContainerDied","Data":"695c93ed41796d06f3f3e40bb3a524d05eafd286f3d72329296fd20a8c146ff8"} Dec 13 03:17:11 crc kubenswrapper[5070]: I1213 03:17:11.968137 5070 scope.go:117] "RemoveContainer" containerID="72f7f5fc3896b38a456efbaf34e0d33b80ea467dddb8f144d01bff0876327ec6" Dec 13 03:17:11 crc kubenswrapper[5070]: I1213 03:17:11.968049 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-xkp8n" Dec 13 03:17:12 crc kubenswrapper[5070]: I1213 03:17:12.001658 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-xkp8n"] Dec 13 03:17:12 crc kubenswrapper[5070]: I1213 03:17:12.005504 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-xkp8n"] Dec 13 03:17:12 crc kubenswrapper[5070]: I1213 03:17:12.173643 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="472601ba-cee2-4f6e-ac53-a5606ef0469f" path="/var/lib/kubelet/pods/472601ba-cee2-4f6e-ac53-a5606ef0469f/volumes" Dec 13 03:18:21 crc kubenswrapper[5070]: I1213 03:18:21.943041 5070 patch_prober.go:28] interesting pod/machine-config-daemon-9l4rb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 13 03:18:21 crc kubenswrapper[5070]: I1213 03:18:21.944091 5070 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 13 03:18:51 crc kubenswrapper[5070]: I1213 03:18:51.943357 5070 patch_prober.go:28] interesting pod/machine-config-daemon-9l4rb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 13 03:18:51 crc kubenswrapper[5070]: I1213 03:18:51.944154 5070 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 13 03:19:21 crc kubenswrapper[5070]: I1213 03:19:21.943521 5070 patch_prober.go:28] interesting pod/machine-config-daemon-9l4rb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 13 03:19:21 crc kubenswrapper[5070]: I1213 03:19:21.944259 5070 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 13 03:19:21 crc kubenswrapper[5070]: I1213 03:19:21.944341 5070 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" Dec 13 03:19:21 crc kubenswrapper[5070]: I1213 03:19:21.945330 5070 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"4e0db15645022c250ea5ee329cdc3679d197f7e9c4f22c7d342a54394aac732f"} pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" containerMessage="Container machine-config-daemon failed 
liveness probe, will be restarted" Dec 13 03:19:21 crc kubenswrapper[5070]: I1213 03:19:21.945495 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" containerName="machine-config-daemon" containerID="cri-o://4e0db15645022c250ea5ee329cdc3679d197f7e9c4f22c7d342a54394aac732f" gracePeriod=600 Dec 13 03:19:22 crc kubenswrapper[5070]: I1213 03:19:22.794754 5070 generic.go:334] "Generic (PLEG): container finished" podID="a2e447c7-5901-414f-af96-69441d4750db" containerID="4e0db15645022c250ea5ee329cdc3679d197f7e9c4f22c7d342a54394aac732f" exitCode=0 Dec 13 03:19:22 crc kubenswrapper[5070]: I1213 03:19:22.794815 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" event={"ID":"a2e447c7-5901-414f-af96-69441d4750db","Type":"ContainerDied","Data":"4e0db15645022c250ea5ee329cdc3679d197f7e9c4f22c7d342a54394aac732f"} Dec 13 03:19:22 crc kubenswrapper[5070]: I1213 03:19:22.795403 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" event={"ID":"a2e447c7-5901-414f-af96-69441d4750db","Type":"ContainerStarted","Data":"b5e0b24c2b047322a8bb0e293689c3f0d2f12b10fd38f01969f32e9cc47932b2"} Dec 13 03:19:22 crc kubenswrapper[5070]: I1213 03:19:22.795433 5070 scope.go:117] "RemoveContainer" containerID="63922f5dc6cb463f08e809ea5fc2f1cd1f0d5a154bfaeed55a81f138c2ba391a" Dec 13 03:21:47 crc kubenswrapper[5070]: I1213 03:21:47.627759 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-k6njx"] Dec 13 03:21:47 crc kubenswrapper[5070]: E1213 03:21:47.629244 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="472601ba-cee2-4f6e-ac53-a5606ef0469f" containerName="registry" Dec 13 03:21:47 crc kubenswrapper[5070]: I1213 03:21:47.629307 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="472601ba-cee2-4f6e-ac53-a5606ef0469f" containerName="registry" Dec 13 03:21:47 crc kubenswrapper[5070]: I1213 03:21:47.629453 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="472601ba-cee2-4f6e-ac53-a5606ef0469f" containerName="registry" Dec 13 03:21:47 crc kubenswrapper[5070]: I1213 03:21:47.630039 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-k6njx" Dec 13 03:21:47 crc kubenswrapper[5070]: I1213 03:21:47.632897 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt" Dec 13 03:21:47 crc kubenswrapper[5070]: I1213 03:21:47.633268 5070 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-7q2kc" Dec 13 03:21:47 crc kubenswrapper[5070]: I1213 03:21:47.633268 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt" Dec 13 03:21:47 crc kubenswrapper[5070]: I1213 03:21:47.646562 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-k6njx"] Dec 13 03:21:47 crc kubenswrapper[5070]: I1213 03:21:47.651863 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-5b446d88c5-sql5c"] Dec 13 03:21:47 crc kubenswrapper[5070]: I1213 03:21:47.652890 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-sql5c" Dec 13 03:21:47 crc kubenswrapper[5070]: I1213 03:21:47.654545 5070 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-wbtcz" Dec 13 03:21:47 crc kubenswrapper[5070]: I1213 03:21:47.676932 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-sql5c"] Dec 13 03:21:47 crc kubenswrapper[5070]: I1213 03:21:47.693293 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-wjw2w"] Dec 13 03:21:47 crc kubenswrapper[5070]: I1213 03:21:47.694188 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-wjw2w" Dec 13 03:21:47 crc kubenswrapper[5070]: I1213 03:21:47.695693 5070 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-9lwvj" Dec 13 03:21:47 crc kubenswrapper[5070]: I1213 03:21:47.711808 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-wjw2w"] Dec 13 03:21:47 crc kubenswrapper[5070]: I1213 03:21:47.788150 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ghtqz\" (UniqueName: \"kubernetes.io/projected/13ca830e-d979-4549-8043-df7846a52f28-kube-api-access-ghtqz\") pod \"cert-manager-5b446d88c5-sql5c\" (UID: \"13ca830e-d979-4549-8043-df7846a52f28\") " pod="cert-manager/cert-manager-5b446d88c5-sql5c" Dec 13 03:21:47 crc kubenswrapper[5070]: I1213 03:21:47.788204 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-22psb\" (UniqueName: \"kubernetes.io/projected/79ea0824-5ff1-40ab-b605-4a299e403c62-kube-api-access-22psb\") pod \"cert-manager-cainjector-7f985d654d-k6njx\" (UID: \"79ea0824-5ff1-40ab-b605-4a299e403c62\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-k6njx" Dec 13 03:21:47 crc kubenswrapper[5070]: I1213 03:21:47.788258 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ptv6c\" (UniqueName: \"kubernetes.io/projected/8d55d441-4092-4ea3-b7a8-3663e819124a-kube-api-access-ptv6c\") pod \"cert-manager-webhook-5655c58dd6-wjw2w\" (UID: \"8d55d441-4092-4ea3-b7a8-3663e819124a\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-wjw2w" Dec 13 03:21:47 crc kubenswrapper[5070]: I1213 03:21:47.888954 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ghtqz\" (UniqueName: \"kubernetes.io/projected/13ca830e-d979-4549-8043-df7846a52f28-kube-api-access-ghtqz\") pod \"cert-manager-5b446d88c5-sql5c\" (UID: \"13ca830e-d979-4549-8043-df7846a52f28\") " pod="cert-manager/cert-manager-5b446d88c5-sql5c" Dec 13 03:21:47 crc kubenswrapper[5070]: I1213 03:21:47.889025 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-22psb\" (UniqueName: \"kubernetes.io/projected/79ea0824-5ff1-40ab-b605-4a299e403c62-kube-api-access-22psb\") pod \"cert-manager-cainjector-7f985d654d-k6njx\" (UID: \"79ea0824-5ff1-40ab-b605-4a299e403c62\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-k6njx" Dec 13 03:21:47 crc kubenswrapper[5070]: I1213 03:21:47.889082 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ptv6c\" (UniqueName: 
\"kubernetes.io/projected/8d55d441-4092-4ea3-b7a8-3663e819124a-kube-api-access-ptv6c\") pod \"cert-manager-webhook-5655c58dd6-wjw2w\" (UID: \"8d55d441-4092-4ea3-b7a8-3663e819124a\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-wjw2w" Dec 13 03:21:47 crc kubenswrapper[5070]: I1213 03:21:47.907792 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-22psb\" (UniqueName: \"kubernetes.io/projected/79ea0824-5ff1-40ab-b605-4a299e403c62-kube-api-access-22psb\") pod \"cert-manager-cainjector-7f985d654d-k6njx\" (UID: \"79ea0824-5ff1-40ab-b605-4a299e403c62\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-k6njx" Dec 13 03:21:47 crc kubenswrapper[5070]: I1213 03:21:47.907908 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ghtqz\" (UniqueName: \"kubernetes.io/projected/13ca830e-d979-4549-8043-df7846a52f28-kube-api-access-ghtqz\") pod \"cert-manager-5b446d88c5-sql5c\" (UID: \"13ca830e-d979-4549-8043-df7846a52f28\") " pod="cert-manager/cert-manager-5b446d88c5-sql5c" Dec 13 03:21:47 crc kubenswrapper[5070]: I1213 03:21:47.908767 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ptv6c\" (UniqueName: \"kubernetes.io/projected/8d55d441-4092-4ea3-b7a8-3663e819124a-kube-api-access-ptv6c\") pod \"cert-manager-webhook-5655c58dd6-wjw2w\" (UID: \"8d55d441-4092-4ea3-b7a8-3663e819124a\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-wjw2w" Dec 13 03:21:47 crc kubenswrapper[5070]: I1213 03:21:47.947231 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-k6njx" Dec 13 03:21:47 crc kubenswrapper[5070]: I1213 03:21:47.971437 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-sql5c" Dec 13 03:21:48 crc kubenswrapper[5070]: I1213 03:21:48.014743 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-wjw2w" Dec 13 03:21:48 crc kubenswrapper[5070]: I1213 03:21:48.158165 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-k6njx"] Dec 13 03:21:48 crc kubenswrapper[5070]: I1213 03:21:48.173359 5070 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 13 03:21:48 crc kubenswrapper[5070]: I1213 03:21:48.200188 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-sql5c"] Dec 13 03:21:48 crc kubenswrapper[5070]: E1213 03:21:48.201639 5070 manager.go:1116] Failed to create existing container: /kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod13ca830e_d979_4549_8043_df7846a52f28.slice/crio-80790458e6cdfb7d1b61633e81246ff51888245e94485d2b8aa40236a07d4426: Error finding container 80790458e6cdfb7d1b61633e81246ff51888245e94485d2b8aa40236a07d4426: Status 404 returned error can't find the container with id 80790458e6cdfb7d1b61633e81246ff51888245e94485d2b8aa40236a07d4426 Dec 13 03:21:48 crc kubenswrapper[5070]: I1213 03:21:48.253917 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-wjw2w"] Dec 13 03:21:48 crc kubenswrapper[5070]: I1213 03:21:48.635116 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-wjw2w" event={"ID":"8d55d441-4092-4ea3-b7a8-3663e819124a","Type":"ContainerStarted","Data":"081ba979ab7444036ff594762e13772e827932c616c4f146bc8e03e14722d059"} Dec 13 03:21:48 crc kubenswrapper[5070]: I1213 03:21:48.636325 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-sql5c" event={"ID":"13ca830e-d979-4549-8043-df7846a52f28","Type":"ContainerStarted","Data":"80790458e6cdfb7d1b61633e81246ff51888245e94485d2b8aa40236a07d4426"} Dec 13 03:21:48 crc kubenswrapper[5070]: I1213 03:21:48.637188 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-k6njx" event={"ID":"79ea0824-5ff1-40ab-b605-4a299e403c62","Type":"ContainerStarted","Data":"c3c1f90582c53fce027f83d15e87ef876ed279e45fb463acf85310e32a37f6c3"} Dec 13 03:21:51 crc kubenswrapper[5070]: I1213 03:21:51.943367 5070 patch_prober.go:28] interesting pod/machine-config-daemon-9l4rb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 13 03:21:51 crc kubenswrapper[5070]: I1213 03:21:51.943704 5070 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 13 03:21:56 crc kubenswrapper[5070]: I1213 03:21:56.689097 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-sql5c" event={"ID":"13ca830e-d979-4549-8043-df7846a52f28","Type":"ContainerStarted","Data":"07e40ee1adf232db196639e8b07f211a4e9d9b575d08f6b27bffffbb4c946857"} Dec 13 03:21:56 crc kubenswrapper[5070]: I1213 03:21:56.691985 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-k6njx" 
event={"ID":"79ea0824-5ff1-40ab-b605-4a299e403c62","Type":"ContainerStarted","Data":"bd3298c9501997394437ed2142fa486d338b7ac28a6fd13d88e5b88380f674ca"} Dec 13 03:21:56 crc kubenswrapper[5070]: I1213 03:21:56.695196 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-wjw2w" event={"ID":"8d55d441-4092-4ea3-b7a8-3663e819124a","Type":"ContainerStarted","Data":"c917bee855ab11d499c62abf2909184335740eb1066cd2a2670835b26d15e89a"} Dec 13 03:21:56 crc kubenswrapper[5070]: I1213 03:21:56.695439 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-5655c58dd6-wjw2w" Dec 13 03:21:56 crc kubenswrapper[5070]: I1213 03:21:56.712150 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-5b446d88c5-sql5c" podStartSLOduration=1.7553896180000002 podStartE2EDuration="9.712136582s" podCreationTimestamp="2025-12-13 03:21:47 +0000 UTC" firstStartedPulling="2025-12-13 03:21:48.203656226 +0000 UTC m=+600.439499782" lastFinishedPulling="2025-12-13 03:21:56.1604032 +0000 UTC m=+608.396246746" observedRunningTime="2025-12-13 03:21:56.71132536 +0000 UTC m=+608.947168906" watchObservedRunningTime="2025-12-13 03:21:56.712136582 +0000 UTC m=+608.947980128" Dec 13 03:21:56 crc kubenswrapper[5070]: I1213 03:21:56.735278 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-7f985d654d-k6njx" podStartSLOduration=1.53512724 podStartE2EDuration="9.735251456s" podCreationTimestamp="2025-12-13 03:21:47 +0000 UTC" firstStartedPulling="2025-12-13 03:21:48.172966306 +0000 UTC m=+600.408809852" lastFinishedPulling="2025-12-13 03:21:56.373090522 +0000 UTC m=+608.608934068" observedRunningTime="2025-12-13 03:21:56.724181373 +0000 UTC m=+608.960024929" watchObservedRunningTime="2025-12-13 03:21:56.735251456 +0000 UTC m=+608.971095042" Dec 13 03:21:56 crc kubenswrapper[5070]: I1213 03:21:56.751992 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-5655c58dd6-wjw2w" podStartSLOduration=1.860914648 podStartE2EDuration="9.751976074s" podCreationTimestamp="2025-12-13 03:21:47 +0000 UTC" firstStartedPulling="2025-12-13 03:21:48.269771626 +0000 UTC m=+600.505615172" lastFinishedPulling="2025-12-13 03:21:56.160833052 +0000 UTC m=+608.396676598" observedRunningTime="2025-12-13 03:21:56.749532196 +0000 UTC m=+608.985375743" watchObservedRunningTime="2025-12-13 03:21:56.751976074 +0000 UTC m=+608.987819630" Dec 13 03:21:57 crc kubenswrapper[5070]: I1213 03:21:57.782347 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-7pmp8"] Dec 13 03:21:57 crc kubenswrapper[5070]: I1213 03:21:57.782805 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-7pmp8" podUID="65d45c3c-cac7-4578-b0b5-05e546d8e356" containerName="ovn-controller" containerID="cri-o://d5d76fb1d00b41d6c27f84434b50a1211c98aa213bba1cb7988b2c35d33f07f3" gracePeriod=30 Dec 13 03:21:57 crc kubenswrapper[5070]: I1213 03:21:57.783155 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-7pmp8" podUID="65d45c3c-cac7-4578-b0b5-05e546d8e356" containerName="sbdb" containerID="cri-o://ede503753ce6d96a69a99c666946f468b83ecde77313d8d29f62b978a7c99e41" gracePeriod=30 Dec 13 03:21:57 crc kubenswrapper[5070]: I1213 03:21:57.783204 5070 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-7pmp8" podUID="65d45c3c-cac7-4578-b0b5-05e546d8e356" containerName="nbdb" containerID="cri-o://4455aebd8013b0ae551a376b4bfc7745c2744abf709223a6d9a975aa63b8da9c" gracePeriod=30 Dec 13 03:21:57 crc kubenswrapper[5070]: I1213 03:21:57.783248 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-7pmp8" podUID="65d45c3c-cac7-4578-b0b5-05e546d8e356" containerName="northd" containerID="cri-o://16558a9632bfa96e34363fb4b5c94c8fdd5c08e0d462bffe09a7147d0212a7e4" gracePeriod=30 Dec 13 03:21:57 crc kubenswrapper[5070]: I1213 03:21:57.783324 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-7pmp8" podUID="65d45c3c-cac7-4578-b0b5-05e546d8e356" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://6f7f21f84fa1339e71e86a8fa1512888749a1ecf63fb88ac90242f55cbad2b58" gracePeriod=30 Dec 13 03:21:57 crc kubenswrapper[5070]: I1213 03:21:57.783383 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-7pmp8" podUID="65d45c3c-cac7-4578-b0b5-05e546d8e356" containerName="kube-rbac-proxy-node" containerID="cri-o://2765090ae9ec41771df9f957049ea3af811f7a0ab5c2a5bce66e198101d8a15c" gracePeriod=30 Dec 13 03:21:57 crc kubenswrapper[5070]: I1213 03:21:57.783425 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-7pmp8" podUID="65d45c3c-cac7-4578-b0b5-05e546d8e356" containerName="ovn-acl-logging" containerID="cri-o://941e50e8345cec7c54b5ea9522c2d9e024167e081dc86bc28f5d6ff032614c5f" gracePeriod=30 Dec 13 03:21:57 crc kubenswrapper[5070]: I1213 03:21:57.817911 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-7pmp8" podUID="65d45c3c-cac7-4578-b0b5-05e546d8e356" containerName="ovnkube-controller" containerID="cri-o://91a8a7efc0acce32b579fb842605e71f2d10df826e4376f880e4698d3a139ead" gracePeriod=30 Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.097337 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-7pmp8_65d45c3c-cac7-4578-b0b5-05e546d8e356/ovn-acl-logging/0.log" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.098502 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-7pmp8_65d45c3c-cac7-4578-b0b5-05e546d8e356/ovn-controller/0.log" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.099037 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-7pmp8" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.152556 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-7qln4"] Dec 13 03:21:59 crc kubenswrapper[5070]: E1213 03:21:59.152789 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="65d45c3c-cac7-4578-b0b5-05e546d8e356" containerName="ovn-controller" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.155383 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="65d45c3c-cac7-4578-b0b5-05e546d8e356" containerName="ovn-controller" Dec 13 03:21:59 crc kubenswrapper[5070]: E1213 03:21:59.155407 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="65d45c3c-cac7-4578-b0b5-05e546d8e356" containerName="northd" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.155415 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="65d45c3c-cac7-4578-b0b5-05e546d8e356" containerName="northd" Dec 13 03:21:59 crc kubenswrapper[5070]: E1213 03:21:59.155425 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="65d45c3c-cac7-4578-b0b5-05e546d8e356" containerName="sbdb" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.155436 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="65d45c3c-cac7-4578-b0b5-05e546d8e356" containerName="sbdb" Dec 13 03:21:59 crc kubenswrapper[5070]: E1213 03:21:59.155475 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="65d45c3c-cac7-4578-b0b5-05e546d8e356" containerName="kubecfg-setup" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.155483 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="65d45c3c-cac7-4578-b0b5-05e546d8e356" containerName="kubecfg-setup" Dec 13 03:21:59 crc kubenswrapper[5070]: E1213 03:21:59.155490 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="65d45c3c-cac7-4578-b0b5-05e546d8e356" containerName="nbdb" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.155498 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="65d45c3c-cac7-4578-b0b5-05e546d8e356" containerName="nbdb" Dec 13 03:21:59 crc kubenswrapper[5070]: E1213 03:21:59.155510 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="65d45c3c-cac7-4578-b0b5-05e546d8e356" containerName="ovn-acl-logging" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.155517 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="65d45c3c-cac7-4578-b0b5-05e546d8e356" containerName="ovn-acl-logging" Dec 13 03:21:59 crc kubenswrapper[5070]: E1213 03:21:59.155529 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="65d45c3c-cac7-4578-b0b5-05e546d8e356" containerName="kube-rbac-proxy-ovn-metrics" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.155537 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="65d45c3c-cac7-4578-b0b5-05e546d8e356" containerName="kube-rbac-proxy-ovn-metrics" Dec 13 03:21:59 crc kubenswrapper[5070]: E1213 03:21:59.155545 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="65d45c3c-cac7-4578-b0b5-05e546d8e356" containerName="kube-rbac-proxy-node" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.155601 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="65d45c3c-cac7-4578-b0b5-05e546d8e356" containerName="kube-rbac-proxy-node" Dec 13 03:21:59 crc kubenswrapper[5070]: E1213 03:21:59.155613 5070 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="65d45c3c-cac7-4578-b0b5-05e546d8e356" containerName="ovnkube-controller" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.155620 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="65d45c3c-cac7-4578-b0b5-05e546d8e356" containerName="ovnkube-controller" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.155780 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="65d45c3c-cac7-4578-b0b5-05e546d8e356" containerName="nbdb" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.155789 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="65d45c3c-cac7-4578-b0b5-05e546d8e356" containerName="ovnkube-controller" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.155799 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="65d45c3c-cac7-4578-b0b5-05e546d8e356" containerName="sbdb" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.155809 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="65d45c3c-cac7-4578-b0b5-05e546d8e356" containerName="kube-rbac-proxy-node" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.155816 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="65d45c3c-cac7-4578-b0b5-05e546d8e356" containerName="ovn-controller" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.155824 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="65d45c3c-cac7-4578-b0b5-05e546d8e356" containerName="ovn-acl-logging" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.155831 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="65d45c3c-cac7-4578-b0b5-05e546d8e356" containerName="northd" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.155840 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="65d45c3c-cac7-4578-b0b5-05e546d8e356" containerName="kube-rbac-proxy-ovn-metrics" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.159123 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-7qln4" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.258492 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/65d45c3c-cac7-4578-b0b5-05e546d8e356-node-log\") pod \"65d45c3c-cac7-4578-b0b5-05e546d8e356\" (UID: \"65d45c3c-cac7-4578-b0b5-05e546d8e356\") " Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.258537 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/65d45c3c-cac7-4578-b0b5-05e546d8e356-host-cni-netd\") pod \"65d45c3c-cac7-4578-b0b5-05e546d8e356\" (UID: \"65d45c3c-cac7-4578-b0b5-05e546d8e356\") " Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.258588 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/65d45c3c-cac7-4578-b0b5-05e546d8e356-etc-openvswitch\") pod \"65d45c3c-cac7-4578-b0b5-05e546d8e356\" (UID: \"65d45c3c-cac7-4578-b0b5-05e546d8e356\") " Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.258613 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/65d45c3c-cac7-4578-b0b5-05e546d8e356-host-slash\") pod \"65d45c3c-cac7-4578-b0b5-05e546d8e356\" (UID: \"65d45c3c-cac7-4578-b0b5-05e546d8e356\") " Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.258632 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/65d45c3c-cac7-4578-b0b5-05e546d8e356-node-log" (OuterVolumeSpecName: "node-log") pod "65d45c3c-cac7-4578-b0b5-05e546d8e356" (UID: "65d45c3c-cac7-4578-b0b5-05e546d8e356"). InnerVolumeSpecName "node-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.258652 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/65d45c3c-cac7-4578-b0b5-05e546d8e356-ovn-node-metrics-cert\") pod \"65d45c3c-cac7-4578-b0b5-05e546d8e356\" (UID: \"65d45c3c-cac7-4578-b0b5-05e546d8e356\") " Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.258675 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/65d45c3c-cac7-4578-b0b5-05e546d8e356-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "65d45c3c-cac7-4578-b0b5-05e546d8e356" (UID: "65d45c3c-cac7-4578-b0b5-05e546d8e356"). InnerVolumeSpecName "etc-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.258725 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/65d45c3c-cac7-4578-b0b5-05e546d8e356-host-run-netns\") pod \"65d45c3c-cac7-4578-b0b5-05e546d8e356\" (UID: \"65d45c3c-cac7-4578-b0b5-05e546d8e356\") " Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.258726 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/65d45c3c-cac7-4578-b0b5-05e546d8e356-host-slash" (OuterVolumeSpecName: "host-slash") pod "65d45c3c-cac7-4578-b0b5-05e546d8e356" (UID: "65d45c3c-cac7-4578-b0b5-05e546d8e356"). InnerVolumeSpecName "host-slash". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.258756 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/65d45c3c-cac7-4578-b0b5-05e546d8e356-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "65d45c3c-cac7-4578-b0b5-05e546d8e356" (UID: "65d45c3c-cac7-4578-b0b5-05e546d8e356"). InnerVolumeSpecName "host-cni-netd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.258765 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/65d45c3c-cac7-4578-b0b5-05e546d8e356-env-overrides\") pod \"65d45c3c-cac7-4578-b0b5-05e546d8e356\" (UID: \"65d45c3c-cac7-4578-b0b5-05e546d8e356\") " Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.258785 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/65d45c3c-cac7-4578-b0b5-05e546d8e356-host-run-ovn-kubernetes\") pod \"65d45c3c-cac7-4578-b0b5-05e546d8e356\" (UID: \"65d45c3c-cac7-4578-b0b5-05e546d8e356\") " Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.258814 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/65d45c3c-cac7-4578-b0b5-05e546d8e356-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "65d45c3c-cac7-4578-b0b5-05e546d8e356" (UID: "65d45c3c-cac7-4578-b0b5-05e546d8e356"). InnerVolumeSpecName "host-run-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.258979 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/65d45c3c-cac7-4578-b0b5-05e546d8e356-log-socket\") pod \"65d45c3c-cac7-4578-b0b5-05e546d8e356\" (UID: \"65d45c3c-cac7-4578-b0b5-05e546d8e356\") " Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.259013 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/65d45c3c-cac7-4578-b0b5-05e546d8e356-log-socket" (OuterVolumeSpecName: "log-socket") pod "65d45c3c-cac7-4578-b0b5-05e546d8e356" (UID: "65d45c3c-cac7-4578-b0b5-05e546d8e356"). InnerVolumeSpecName "log-socket". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.259056 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/65d45c3c-cac7-4578-b0b5-05e546d8e356-host-kubelet\") pod \"65d45c3c-cac7-4578-b0b5-05e546d8e356\" (UID: \"65d45c3c-cac7-4578-b0b5-05e546d8e356\") " Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.259081 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/65d45c3c-cac7-4578-b0b5-05e546d8e356-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "65d45c3c-cac7-4578-b0b5-05e546d8e356" (UID: "65d45c3c-cac7-4578-b0b5-05e546d8e356"). InnerVolumeSpecName "host-kubelet". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.259139 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/65d45c3c-cac7-4578-b0b5-05e546d8e356-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "65d45c3c-cac7-4578-b0b5-05e546d8e356" (UID: "65d45c3c-cac7-4578-b0b5-05e546d8e356"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.259177 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/65d45c3c-cac7-4578-b0b5-05e546d8e356-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "65d45c3c-cac7-4578-b0b5-05e546d8e356" (UID: "65d45c3c-cac7-4578-b0b5-05e546d8e356"). InnerVolumeSpecName "host-run-netns". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.259186 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/65d45c3c-cac7-4578-b0b5-05e546d8e356-systemd-units\") pod \"65d45c3c-cac7-4578-b0b5-05e546d8e356\" (UID: \"65d45c3c-cac7-4578-b0b5-05e546d8e356\") " Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.259223 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/65d45c3c-cac7-4578-b0b5-05e546d8e356-run-openvswitch\") pod \"65d45c3c-cac7-4578-b0b5-05e546d8e356\" (UID: \"65d45c3c-cac7-4578-b0b5-05e546d8e356\") " Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.259239 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/65d45c3c-cac7-4578-b0b5-05e546d8e356-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "65d45c3c-cac7-4578-b0b5-05e546d8e356" (UID: "65d45c3c-cac7-4578-b0b5-05e546d8e356"). InnerVolumeSpecName "systemd-units". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.259263 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/65d45c3c-cac7-4578-b0b5-05e546d8e356-ovnkube-config\") pod \"65d45c3c-cac7-4578-b0b5-05e546d8e356\" (UID: \"65d45c3c-cac7-4578-b0b5-05e546d8e356\") " Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.259291 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/65d45c3c-cac7-4578-b0b5-05e546d8e356-run-systemd\") pod \"65d45c3c-cac7-4578-b0b5-05e546d8e356\" (UID: \"65d45c3c-cac7-4578-b0b5-05e546d8e356\") " Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.259324 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/65d45c3c-cac7-4578-b0b5-05e546d8e356-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "65d45c3c-cac7-4578-b0b5-05e546d8e356" (UID: "65d45c3c-cac7-4578-b0b5-05e546d8e356"). InnerVolumeSpecName "run-openvswitch". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.259336 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/65d45c3c-cac7-4578-b0b5-05e546d8e356-run-ovn\") pod \"65d45c3c-cac7-4578-b0b5-05e546d8e356\" (UID: \"65d45c3c-cac7-4578-b0b5-05e546d8e356\") " Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.259363 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/65d45c3c-cac7-4578-b0b5-05e546d8e356-ovnkube-script-lib\") pod \"65d45c3c-cac7-4578-b0b5-05e546d8e356\" (UID: \"65d45c3c-cac7-4578-b0b5-05e546d8e356\") " Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.259388 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/65d45c3c-cac7-4578-b0b5-05e546d8e356-var-lib-openvswitch\") pod \"65d45c3c-cac7-4578-b0b5-05e546d8e356\" (UID: \"65d45c3c-cac7-4578-b0b5-05e546d8e356\") " Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.259421 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fnwm2\" (UniqueName: \"kubernetes.io/projected/65d45c3c-cac7-4578-b0b5-05e546d8e356-kube-api-access-fnwm2\") pod \"65d45c3c-cac7-4578-b0b5-05e546d8e356\" (UID: \"65d45c3c-cac7-4578-b0b5-05e546d8e356\") " Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.259464 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/65d45c3c-cac7-4578-b0b5-05e546d8e356-host-var-lib-cni-networks-ovn-kubernetes\") pod \"65d45c3c-cac7-4578-b0b5-05e546d8e356\" (UID: \"65d45c3c-cac7-4578-b0b5-05e546d8e356\") " Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.259493 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/65d45c3c-cac7-4578-b0b5-05e546d8e356-host-cni-bin\") pod \"65d45c3c-cac7-4578-b0b5-05e546d8e356\" (UID: \"65d45c3c-cac7-4578-b0b5-05e546d8e356\") " Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.259589 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/65d45c3c-cac7-4578-b0b5-05e546d8e356-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "65d45c3c-cac7-4578-b0b5-05e546d8e356" (UID: "65d45c3c-cac7-4578-b0b5-05e546d8e356"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.259625 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/65d45c3c-cac7-4578-b0b5-05e546d8e356-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "65d45c3c-cac7-4578-b0b5-05e546d8e356" (UID: "65d45c3c-cac7-4578-b0b5-05e546d8e356"). InnerVolumeSpecName "var-lib-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.259678 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/65d45c3c-cac7-4578-b0b5-05e546d8e356-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "65d45c3c-cac7-4578-b0b5-05e546d8e356" (UID: "65d45c3c-cac7-4578-b0b5-05e546d8e356"). 
InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.259779 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/65d45c3c-cac7-4578-b0b5-05e546d8e356-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "65d45c3c-cac7-4578-b0b5-05e546d8e356" (UID: "65d45c3c-cac7-4578-b0b5-05e546d8e356"). InnerVolumeSpecName "host-cni-bin". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.259763 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/65d45c3c-cac7-4578-b0b5-05e546d8e356-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "65d45c3c-cac7-4578-b0b5-05e546d8e356" (UID: "65d45c3c-cac7-4578-b0b5-05e546d8e356"). InnerVolumeSpecName "run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.259820 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/9de33417-6ba9-4c5c-a04d-7dab1f22d022-node-log\") pod \"ovnkube-node-7qln4\" (UID: \"9de33417-6ba9-4c5c-a04d-7dab1f22d022\") " pod="openshift-ovn-kubernetes/ovnkube-node-7qln4" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.259849 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/9de33417-6ba9-4c5c-a04d-7dab1f22d022-ovnkube-script-lib\") pod \"ovnkube-node-7qln4\" (UID: \"9de33417-6ba9-4c5c-a04d-7dab1f22d022\") " pod="openshift-ovn-kubernetes/ovnkube-node-7qln4" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.259870 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/9de33417-6ba9-4c5c-a04d-7dab1f22d022-systemd-units\") pod \"ovnkube-node-7qln4\" (UID: \"9de33417-6ba9-4c5c-a04d-7dab1f22d022\") " pod="openshift-ovn-kubernetes/ovnkube-node-7qln4" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.259889 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/9de33417-6ba9-4c5c-a04d-7dab1f22d022-env-overrides\") pod \"ovnkube-node-7qln4\" (UID: \"9de33417-6ba9-4c5c-a04d-7dab1f22d022\") " pod="openshift-ovn-kubernetes/ovnkube-node-7qln4" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.259915 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/9de33417-6ba9-4c5c-a04d-7dab1f22d022-host-run-netns\") pod \"ovnkube-node-7qln4\" (UID: \"9de33417-6ba9-4c5c-a04d-7dab1f22d022\") " pod="openshift-ovn-kubernetes/ovnkube-node-7qln4" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.259937 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/9de33417-6ba9-4c5c-a04d-7dab1f22d022-ovn-node-metrics-cert\") pod \"ovnkube-node-7qln4\" (UID: \"9de33417-6ba9-4c5c-a04d-7dab1f22d022\") " pod="openshift-ovn-kubernetes/ovnkube-node-7qln4" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.260009 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/9de33417-6ba9-4c5c-a04d-7dab1f22d022-ovnkube-config\") pod \"ovnkube-node-7qln4\" (UID: \"9de33417-6ba9-4c5c-a04d-7dab1f22d022\") " pod="openshift-ovn-kubernetes/ovnkube-node-7qln4" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.260035 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/9de33417-6ba9-4c5c-a04d-7dab1f22d022-host-cni-bin\") pod \"ovnkube-node-7qln4\" (UID: \"9de33417-6ba9-4c5c-a04d-7dab1f22d022\") " pod="openshift-ovn-kubernetes/ovnkube-node-7qln4" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.260062 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/9de33417-6ba9-4c5c-a04d-7dab1f22d022-var-lib-openvswitch\") pod \"ovnkube-node-7qln4\" (UID: \"9de33417-6ba9-4c5c-a04d-7dab1f22d022\") " pod="openshift-ovn-kubernetes/ovnkube-node-7qln4" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.260097 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/9de33417-6ba9-4c5c-a04d-7dab1f22d022-host-kubelet\") pod \"ovnkube-node-7qln4\" (UID: \"9de33417-6ba9-4c5c-a04d-7dab1f22d022\") " pod="openshift-ovn-kubernetes/ovnkube-node-7qln4" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.260129 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/9de33417-6ba9-4c5c-a04d-7dab1f22d022-run-systemd\") pod \"ovnkube-node-7qln4\" (UID: \"9de33417-6ba9-4c5c-a04d-7dab1f22d022\") " pod="openshift-ovn-kubernetes/ovnkube-node-7qln4" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.260149 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/9de33417-6ba9-4c5c-a04d-7dab1f22d022-host-cni-netd\") pod \"ovnkube-node-7qln4\" (UID: \"9de33417-6ba9-4c5c-a04d-7dab1f22d022\") " pod="openshift-ovn-kubernetes/ovnkube-node-7qln4" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.260173 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/9de33417-6ba9-4c5c-a04d-7dab1f22d022-host-slash\") pod \"ovnkube-node-7qln4\" (UID: \"9de33417-6ba9-4c5c-a04d-7dab1f22d022\") " pod="openshift-ovn-kubernetes/ovnkube-node-7qln4" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.260197 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/9de33417-6ba9-4c5c-a04d-7dab1f22d022-run-ovn\") pod \"ovnkube-node-7qln4\" (UID: \"9de33417-6ba9-4c5c-a04d-7dab1f22d022\") " pod="openshift-ovn-kubernetes/ovnkube-node-7qln4" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.260252 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/9de33417-6ba9-4c5c-a04d-7dab1f22d022-run-openvswitch\") pod \"ovnkube-node-7qln4\" (UID: \"9de33417-6ba9-4c5c-a04d-7dab1f22d022\") " pod="openshift-ovn-kubernetes/ovnkube-node-7qln4" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.260331 5070 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/9de33417-6ba9-4c5c-a04d-7dab1f22d022-host-run-ovn-kubernetes\") pod \"ovnkube-node-7qln4\" (UID: \"9de33417-6ba9-4c5c-a04d-7dab1f22d022\") " pod="openshift-ovn-kubernetes/ovnkube-node-7qln4" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.260371 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/9de33417-6ba9-4c5c-a04d-7dab1f22d022-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-7qln4\" (UID: \"9de33417-6ba9-4c5c-a04d-7dab1f22d022\") " pod="openshift-ovn-kubernetes/ovnkube-node-7qln4" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.260393 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/9de33417-6ba9-4c5c-a04d-7dab1f22d022-etc-openvswitch\") pod \"ovnkube-node-7qln4\" (UID: \"9de33417-6ba9-4c5c-a04d-7dab1f22d022\") " pod="openshift-ovn-kubernetes/ovnkube-node-7qln4" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.260409 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-42bg7\" (UniqueName: \"kubernetes.io/projected/9de33417-6ba9-4c5c-a04d-7dab1f22d022-kube-api-access-42bg7\") pod \"ovnkube-node-7qln4\" (UID: \"9de33417-6ba9-4c5c-a04d-7dab1f22d022\") " pod="openshift-ovn-kubernetes/ovnkube-node-7qln4" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.260451 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/9de33417-6ba9-4c5c-a04d-7dab1f22d022-log-socket\") pod \"ovnkube-node-7qln4\" (UID: \"9de33417-6ba9-4c5c-a04d-7dab1f22d022\") " pod="openshift-ovn-kubernetes/ovnkube-node-7qln4" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.260650 5070 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/65d45c3c-cac7-4578-b0b5-05e546d8e356-host-run-netns\") on node \"crc\" DevicePath \"\"" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.260669 5070 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/65d45c3c-cac7-4578-b0b5-05e546d8e356-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.260681 5070 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/65d45c3c-cac7-4578-b0b5-05e546d8e356-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.260691 5070 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/65d45c3c-cac7-4578-b0b5-05e546d8e356-log-socket\") on node \"crc\" DevicePath \"\"" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.260699 5070 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/65d45c3c-cac7-4578-b0b5-05e546d8e356-host-kubelet\") on node \"crc\" DevicePath \"\"" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.260707 5070 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: 
\"kubernetes.io/host-path/65d45c3c-cac7-4578-b0b5-05e546d8e356-systemd-units\") on node \"crc\" DevicePath \"\"" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.260717 5070 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/65d45c3c-cac7-4578-b0b5-05e546d8e356-run-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.260726 5070 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/65d45c3c-cac7-4578-b0b5-05e546d8e356-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.260735 5070 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/65d45c3c-cac7-4578-b0b5-05e546d8e356-run-ovn\") on node \"crc\" DevicePath \"\"" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.260744 5070 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/65d45c3c-cac7-4578-b0b5-05e546d8e356-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.260752 5070 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/65d45c3c-cac7-4578-b0b5-05e546d8e356-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.260761 5070 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/65d45c3c-cac7-4578-b0b5-05e546d8e356-host-cni-bin\") on node \"crc\" DevicePath \"\"" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.260771 5070 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/65d45c3c-cac7-4578-b0b5-05e546d8e356-node-log\") on node \"crc\" DevicePath \"\"" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.260779 5070 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/65d45c3c-cac7-4578-b0b5-05e546d8e356-host-cni-netd\") on node \"crc\" DevicePath \"\"" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.260786 5070 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/65d45c3c-cac7-4578-b0b5-05e546d8e356-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.260794 5070 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/65d45c3c-cac7-4578-b0b5-05e546d8e356-host-slash\") on node \"crc\" DevicePath \"\"" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.261226 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/65d45c3c-cac7-4578-b0b5-05e546d8e356-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "65d45c3c-cac7-4578-b0b5-05e546d8e356" (UID: "65d45c3c-cac7-4578-b0b5-05e546d8e356"). InnerVolumeSpecName "ovnkube-script-lib". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.264980 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/65d45c3c-cac7-4578-b0b5-05e546d8e356-kube-api-access-fnwm2" (OuterVolumeSpecName: "kube-api-access-fnwm2") pod "65d45c3c-cac7-4578-b0b5-05e546d8e356" (UID: "65d45c3c-cac7-4578-b0b5-05e546d8e356"). InnerVolumeSpecName "kube-api-access-fnwm2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.278690 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/65d45c3c-cac7-4578-b0b5-05e546d8e356-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "65d45c3c-cac7-4578-b0b5-05e546d8e356" (UID: "65d45c3c-cac7-4578-b0b5-05e546d8e356"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.281853 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/65d45c3c-cac7-4578-b0b5-05e546d8e356-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "65d45c3c-cac7-4578-b0b5-05e546d8e356" (UID: "65d45c3c-cac7-4578-b0b5-05e546d8e356"). InnerVolumeSpecName "run-systemd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.361789 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/9de33417-6ba9-4c5c-a04d-7dab1f22d022-run-systemd\") pod \"ovnkube-node-7qln4\" (UID: \"9de33417-6ba9-4c5c-a04d-7dab1f22d022\") " pod="openshift-ovn-kubernetes/ovnkube-node-7qln4" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.361867 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/9de33417-6ba9-4c5c-a04d-7dab1f22d022-host-cni-netd\") pod \"ovnkube-node-7qln4\" (UID: \"9de33417-6ba9-4c5c-a04d-7dab1f22d022\") " pod="openshift-ovn-kubernetes/ovnkube-node-7qln4" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.361913 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/9de33417-6ba9-4c5c-a04d-7dab1f22d022-host-slash\") pod \"ovnkube-node-7qln4\" (UID: \"9de33417-6ba9-4c5c-a04d-7dab1f22d022\") " pod="openshift-ovn-kubernetes/ovnkube-node-7qln4" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.361942 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/9de33417-6ba9-4c5c-a04d-7dab1f22d022-run-systemd\") pod \"ovnkube-node-7qln4\" (UID: \"9de33417-6ba9-4c5c-a04d-7dab1f22d022\") " pod="openshift-ovn-kubernetes/ovnkube-node-7qln4" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.361955 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/9de33417-6ba9-4c5c-a04d-7dab1f22d022-run-ovn\") pod \"ovnkube-node-7qln4\" (UID: \"9de33417-6ba9-4c5c-a04d-7dab1f22d022\") " pod="openshift-ovn-kubernetes/ovnkube-node-7qln4" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.362019 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/9de33417-6ba9-4c5c-a04d-7dab1f22d022-run-ovn\") pod \"ovnkube-node-7qln4\" (UID: 
\"9de33417-6ba9-4c5c-a04d-7dab1f22d022\") " pod="openshift-ovn-kubernetes/ovnkube-node-7qln4" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.362066 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/9de33417-6ba9-4c5c-a04d-7dab1f22d022-host-cni-netd\") pod \"ovnkube-node-7qln4\" (UID: \"9de33417-6ba9-4c5c-a04d-7dab1f22d022\") " pod="openshift-ovn-kubernetes/ovnkube-node-7qln4" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.362112 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/9de33417-6ba9-4c5c-a04d-7dab1f22d022-run-openvswitch\") pod \"ovnkube-node-7qln4\" (UID: \"9de33417-6ba9-4c5c-a04d-7dab1f22d022\") " pod="openshift-ovn-kubernetes/ovnkube-node-7qln4" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.362091 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/9de33417-6ba9-4c5c-a04d-7dab1f22d022-run-openvswitch\") pod \"ovnkube-node-7qln4\" (UID: \"9de33417-6ba9-4c5c-a04d-7dab1f22d022\") " pod="openshift-ovn-kubernetes/ovnkube-node-7qln4" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.362066 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/9de33417-6ba9-4c5c-a04d-7dab1f22d022-host-slash\") pod \"ovnkube-node-7qln4\" (UID: \"9de33417-6ba9-4c5c-a04d-7dab1f22d022\") " pod="openshift-ovn-kubernetes/ovnkube-node-7qln4" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.362177 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/9de33417-6ba9-4c5c-a04d-7dab1f22d022-host-run-ovn-kubernetes\") pod \"ovnkube-node-7qln4\" (UID: \"9de33417-6ba9-4c5c-a04d-7dab1f22d022\") " pod="openshift-ovn-kubernetes/ovnkube-node-7qln4" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.362210 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/9de33417-6ba9-4c5c-a04d-7dab1f22d022-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-7qln4\" (UID: \"9de33417-6ba9-4c5c-a04d-7dab1f22d022\") " pod="openshift-ovn-kubernetes/ovnkube-node-7qln4" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.362233 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/9de33417-6ba9-4c5c-a04d-7dab1f22d022-etc-openvswitch\") pod \"ovnkube-node-7qln4\" (UID: \"9de33417-6ba9-4c5c-a04d-7dab1f22d022\") " pod="openshift-ovn-kubernetes/ovnkube-node-7qln4" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.362236 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/9de33417-6ba9-4c5c-a04d-7dab1f22d022-host-run-ovn-kubernetes\") pod \"ovnkube-node-7qln4\" (UID: \"9de33417-6ba9-4c5c-a04d-7dab1f22d022\") " pod="openshift-ovn-kubernetes/ovnkube-node-7qln4" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.362250 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-42bg7\" (UniqueName: \"kubernetes.io/projected/9de33417-6ba9-4c5c-a04d-7dab1f22d022-kube-api-access-42bg7\") pod \"ovnkube-node-7qln4\" (UID: 
\"9de33417-6ba9-4c5c-a04d-7dab1f22d022\") " pod="openshift-ovn-kubernetes/ovnkube-node-7qln4" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.362288 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/9de33417-6ba9-4c5c-a04d-7dab1f22d022-log-socket\") pod \"ovnkube-node-7qln4\" (UID: \"9de33417-6ba9-4c5c-a04d-7dab1f22d022\") " pod="openshift-ovn-kubernetes/ovnkube-node-7qln4" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.362307 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/9de33417-6ba9-4c5c-a04d-7dab1f22d022-etc-openvswitch\") pod \"ovnkube-node-7qln4\" (UID: \"9de33417-6ba9-4c5c-a04d-7dab1f22d022\") " pod="openshift-ovn-kubernetes/ovnkube-node-7qln4" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.362322 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/9de33417-6ba9-4c5c-a04d-7dab1f22d022-node-log\") pod \"ovnkube-node-7qln4\" (UID: \"9de33417-6ba9-4c5c-a04d-7dab1f22d022\") " pod="openshift-ovn-kubernetes/ovnkube-node-7qln4" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.362341 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/9de33417-6ba9-4c5c-a04d-7dab1f22d022-ovnkube-script-lib\") pod \"ovnkube-node-7qln4\" (UID: \"9de33417-6ba9-4c5c-a04d-7dab1f22d022\") " pod="openshift-ovn-kubernetes/ovnkube-node-7qln4" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.362348 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/9de33417-6ba9-4c5c-a04d-7dab1f22d022-log-socket\") pod \"ovnkube-node-7qln4\" (UID: \"9de33417-6ba9-4c5c-a04d-7dab1f22d022\") " pod="openshift-ovn-kubernetes/ovnkube-node-7qln4" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.362360 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/9de33417-6ba9-4c5c-a04d-7dab1f22d022-env-overrides\") pod \"ovnkube-node-7qln4\" (UID: \"9de33417-6ba9-4c5c-a04d-7dab1f22d022\") " pod="openshift-ovn-kubernetes/ovnkube-node-7qln4" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.362388 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/9de33417-6ba9-4c5c-a04d-7dab1f22d022-systemd-units\") pod \"ovnkube-node-7qln4\" (UID: \"9de33417-6ba9-4c5c-a04d-7dab1f22d022\") " pod="openshift-ovn-kubernetes/ovnkube-node-7qln4" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.362412 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/9de33417-6ba9-4c5c-a04d-7dab1f22d022-host-run-netns\") pod \"ovnkube-node-7qln4\" (UID: \"9de33417-6ba9-4c5c-a04d-7dab1f22d022\") " pod="openshift-ovn-kubernetes/ovnkube-node-7qln4" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.362424 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/9de33417-6ba9-4c5c-a04d-7dab1f22d022-node-log\") pod \"ovnkube-node-7qln4\" (UID: \"9de33417-6ba9-4c5c-a04d-7dab1f22d022\") " pod="openshift-ovn-kubernetes/ovnkube-node-7qln4" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.362432 
5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/9de33417-6ba9-4c5c-a04d-7dab1f22d022-ovn-node-metrics-cert\") pod \"ovnkube-node-7qln4\" (UID: \"9de33417-6ba9-4c5c-a04d-7dab1f22d022\") " pod="openshift-ovn-kubernetes/ovnkube-node-7qln4" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.362558 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/9de33417-6ba9-4c5c-a04d-7dab1f22d022-ovnkube-config\") pod \"ovnkube-node-7qln4\" (UID: \"9de33417-6ba9-4c5c-a04d-7dab1f22d022\") " pod="openshift-ovn-kubernetes/ovnkube-node-7qln4" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.362593 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/9de33417-6ba9-4c5c-a04d-7dab1f22d022-host-run-netns\") pod \"ovnkube-node-7qln4\" (UID: \"9de33417-6ba9-4c5c-a04d-7dab1f22d022\") " pod="openshift-ovn-kubernetes/ovnkube-node-7qln4" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.362576 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/9de33417-6ba9-4c5c-a04d-7dab1f22d022-systemd-units\") pod \"ovnkube-node-7qln4\" (UID: \"9de33417-6ba9-4c5c-a04d-7dab1f22d022\") " pod="openshift-ovn-kubernetes/ovnkube-node-7qln4" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.362616 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/9de33417-6ba9-4c5c-a04d-7dab1f22d022-host-cni-bin\") pod \"ovnkube-node-7qln4\" (UID: \"9de33417-6ba9-4c5c-a04d-7dab1f22d022\") " pod="openshift-ovn-kubernetes/ovnkube-node-7qln4" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.362668 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/9de33417-6ba9-4c5c-a04d-7dab1f22d022-host-cni-bin\") pod \"ovnkube-node-7qln4\" (UID: \"9de33417-6ba9-4c5c-a04d-7dab1f22d022\") " pod="openshift-ovn-kubernetes/ovnkube-node-7qln4" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.362670 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/9de33417-6ba9-4c5c-a04d-7dab1f22d022-var-lib-openvswitch\") pod \"ovnkube-node-7qln4\" (UID: \"9de33417-6ba9-4c5c-a04d-7dab1f22d022\") " pod="openshift-ovn-kubernetes/ovnkube-node-7qln4" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.362720 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/9de33417-6ba9-4c5c-a04d-7dab1f22d022-var-lib-openvswitch\") pod \"ovnkube-node-7qln4\" (UID: \"9de33417-6ba9-4c5c-a04d-7dab1f22d022\") " pod="openshift-ovn-kubernetes/ovnkube-node-7qln4" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.362739 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/9de33417-6ba9-4c5c-a04d-7dab1f22d022-host-kubelet\") pod \"ovnkube-node-7qln4\" (UID: \"9de33417-6ba9-4c5c-a04d-7dab1f22d022\") " pod="openshift-ovn-kubernetes/ovnkube-node-7qln4" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.362727 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/9de33417-6ba9-4c5c-a04d-7dab1f22d022-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-7qln4\" (UID: \"9de33417-6ba9-4c5c-a04d-7dab1f22d022\") " pod="openshift-ovn-kubernetes/ovnkube-node-7qln4" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.362793 5070 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/65d45c3c-cac7-4578-b0b5-05e546d8e356-run-systemd\") on node \"crc\" DevicePath \"\"" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.362794 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/9de33417-6ba9-4c5c-a04d-7dab1f22d022-host-kubelet\") pod \"ovnkube-node-7qln4\" (UID: \"9de33417-6ba9-4c5c-a04d-7dab1f22d022\") " pod="openshift-ovn-kubernetes/ovnkube-node-7qln4" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.362810 5070 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/65d45c3c-cac7-4578-b0b5-05e546d8e356-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.362826 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fnwm2\" (UniqueName: \"kubernetes.io/projected/65d45c3c-cac7-4578-b0b5-05e546d8e356-kube-api-access-fnwm2\") on node \"crc\" DevicePath \"\"" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.362839 5070 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/65d45c3c-cac7-4578-b0b5-05e546d8e356-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.363176 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/9de33417-6ba9-4c5c-a04d-7dab1f22d022-ovnkube-script-lib\") pod \"ovnkube-node-7qln4\" (UID: \"9de33417-6ba9-4c5c-a04d-7dab1f22d022\") " pod="openshift-ovn-kubernetes/ovnkube-node-7qln4" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.363489 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/9de33417-6ba9-4c5c-a04d-7dab1f22d022-env-overrides\") pod \"ovnkube-node-7qln4\" (UID: \"9de33417-6ba9-4c5c-a04d-7dab1f22d022\") " pod="openshift-ovn-kubernetes/ovnkube-node-7qln4" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.363630 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/9de33417-6ba9-4c5c-a04d-7dab1f22d022-ovnkube-config\") pod \"ovnkube-node-7qln4\" (UID: \"9de33417-6ba9-4c5c-a04d-7dab1f22d022\") " pod="openshift-ovn-kubernetes/ovnkube-node-7qln4" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.365473 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/9de33417-6ba9-4c5c-a04d-7dab1f22d022-ovn-node-metrics-cert\") pod \"ovnkube-node-7qln4\" (UID: \"9de33417-6ba9-4c5c-a04d-7dab1f22d022\") " pod="openshift-ovn-kubernetes/ovnkube-node-7qln4" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.380001 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-42bg7\" (UniqueName: \"kubernetes.io/projected/9de33417-6ba9-4c5c-a04d-7dab1f22d022-kube-api-access-42bg7\") pod 
\"ovnkube-node-7qln4\" (UID: \"9de33417-6ba9-4c5c-a04d-7dab1f22d022\") " pod="openshift-ovn-kubernetes/ovnkube-node-7qln4" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.479800 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-7qln4" Dec 13 03:21:59 crc kubenswrapper[5070]: W1213 03:21:59.503103 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9de33417_6ba9_4c5c_a04d_7dab1f22d022.slice/crio-77308b6e2b9ecc9366e18c82808a5e916e825e18e51266ba9edfbd4216a7d9f6 WatchSource:0}: Error finding container 77308b6e2b9ecc9366e18c82808a5e916e825e18e51266ba9edfbd4216a7d9f6: Status 404 returned error can't find the container with id 77308b6e2b9ecc9366e18c82808a5e916e825e18e51266ba9edfbd4216a7d9f6 Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.716739 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7qln4" event={"ID":"9de33417-6ba9-4c5c-a04d-7dab1f22d022","Type":"ContainerStarted","Data":"77308b6e2b9ecc9366e18c82808a5e916e825e18e51266ba9edfbd4216a7d9f6"} Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.721875 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-7pmp8_65d45c3c-cac7-4578-b0b5-05e546d8e356/ovn-acl-logging/0.log" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.722688 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-7pmp8_65d45c3c-cac7-4578-b0b5-05e546d8e356/ovn-controller/0.log" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.723892 5070 generic.go:334] "Generic (PLEG): container finished" podID="65d45c3c-cac7-4578-b0b5-05e546d8e356" containerID="91a8a7efc0acce32b579fb842605e71f2d10df826e4376f880e4698d3a139ead" exitCode=0 Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.723920 5070 generic.go:334] "Generic (PLEG): container finished" podID="65d45c3c-cac7-4578-b0b5-05e546d8e356" containerID="ede503753ce6d96a69a99c666946f468b83ecde77313d8d29f62b978a7c99e41" exitCode=0 Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.723931 5070 generic.go:334] "Generic (PLEG): container finished" podID="65d45c3c-cac7-4578-b0b5-05e546d8e356" containerID="4455aebd8013b0ae551a376b4bfc7745c2744abf709223a6d9a975aa63b8da9c" exitCode=0 Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.723941 5070 generic.go:334] "Generic (PLEG): container finished" podID="65d45c3c-cac7-4578-b0b5-05e546d8e356" containerID="16558a9632bfa96e34363fb4b5c94c8fdd5c08e0d462bffe09a7147d0212a7e4" exitCode=0 Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.723950 5070 generic.go:334] "Generic (PLEG): container finished" podID="65d45c3c-cac7-4578-b0b5-05e546d8e356" containerID="6f7f21f84fa1339e71e86a8fa1512888749a1ecf63fb88ac90242f55cbad2b58" exitCode=0 Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.723959 5070 generic.go:334] "Generic (PLEG): container finished" podID="65d45c3c-cac7-4578-b0b5-05e546d8e356" containerID="2765090ae9ec41771df9f957049ea3af811f7a0ab5c2a5bce66e198101d8a15c" exitCode=0 Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.723968 5070 generic.go:334] "Generic (PLEG): container finished" podID="65d45c3c-cac7-4578-b0b5-05e546d8e356" containerID="941e50e8345cec7c54b5ea9522c2d9e024167e081dc86bc28f5d6ff032614c5f" exitCode=143 Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.723977 5070 generic.go:334] "Generic (PLEG): container finished" 
podID="65d45c3c-cac7-4578-b0b5-05e546d8e356" containerID="d5d76fb1d00b41d6c27f84434b50a1211c98aa213bba1cb7988b2c35d33f07f3" exitCode=143 Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.724044 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7pmp8" event={"ID":"65d45c3c-cac7-4578-b0b5-05e546d8e356","Type":"ContainerDied","Data":"91a8a7efc0acce32b579fb842605e71f2d10df826e4376f880e4698d3a139ead"} Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.724112 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7pmp8" event={"ID":"65d45c3c-cac7-4578-b0b5-05e546d8e356","Type":"ContainerDied","Data":"ede503753ce6d96a69a99c666946f468b83ecde77313d8d29f62b978a7c99e41"} Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.724133 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7pmp8" event={"ID":"65d45c3c-cac7-4578-b0b5-05e546d8e356","Type":"ContainerDied","Data":"4455aebd8013b0ae551a376b4bfc7745c2744abf709223a6d9a975aa63b8da9c"} Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.724149 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7pmp8" event={"ID":"65d45c3c-cac7-4578-b0b5-05e546d8e356","Type":"ContainerDied","Data":"16558a9632bfa96e34363fb4b5c94c8fdd5c08e0d462bffe09a7147d0212a7e4"} Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.724055 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-7pmp8" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.724165 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7pmp8" event={"ID":"65d45c3c-cac7-4578-b0b5-05e546d8e356","Type":"ContainerDied","Data":"6f7f21f84fa1339e71e86a8fa1512888749a1ecf63fb88ac90242f55cbad2b58"} Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.724189 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7pmp8" event={"ID":"65d45c3c-cac7-4578-b0b5-05e546d8e356","Type":"ContainerDied","Data":"2765090ae9ec41771df9f957049ea3af811f7a0ab5c2a5bce66e198101d8a15c"} Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.724211 5070 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"941e50e8345cec7c54b5ea9522c2d9e024167e081dc86bc28f5d6ff032614c5f"} Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.724228 5070 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d5d76fb1d00b41d6c27f84434b50a1211c98aa213bba1cb7988b2c35d33f07f3"} Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.724238 5070 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"dfd5f39ce1d4e9d62672946aad6fb238ea657dbee23811712271177505bc948f"} Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.724249 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7pmp8" event={"ID":"65d45c3c-cac7-4578-b0b5-05e546d8e356","Type":"ContainerDied","Data":"941e50e8345cec7c54b5ea9522c2d9e024167e081dc86bc28f5d6ff032614c5f"} Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.724262 5070 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"91a8a7efc0acce32b579fb842605e71f2d10df826e4376f880e4698d3a139ead"} Dec 13 
03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.724274 5070 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ede503753ce6d96a69a99c666946f468b83ecde77313d8d29f62b978a7c99e41"} Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.724283 5070 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4455aebd8013b0ae551a376b4bfc7745c2744abf709223a6d9a975aa63b8da9c"} Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.724291 5070 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"16558a9632bfa96e34363fb4b5c94c8fdd5c08e0d462bffe09a7147d0212a7e4"} Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.724300 5070 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6f7f21f84fa1339e71e86a8fa1512888749a1ecf63fb88ac90242f55cbad2b58"} Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.724313 5070 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2765090ae9ec41771df9f957049ea3af811f7a0ab5c2a5bce66e198101d8a15c"} Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.724323 5070 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"941e50e8345cec7c54b5ea9522c2d9e024167e081dc86bc28f5d6ff032614c5f"} Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.724333 5070 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d5d76fb1d00b41d6c27f84434b50a1211c98aa213bba1cb7988b2c35d33f07f3"} Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.724344 5070 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"dfd5f39ce1d4e9d62672946aad6fb238ea657dbee23811712271177505bc948f"} Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.724355 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7pmp8" event={"ID":"65d45c3c-cac7-4578-b0b5-05e546d8e356","Type":"ContainerDied","Data":"d5d76fb1d00b41d6c27f84434b50a1211c98aa213bba1cb7988b2c35d33f07f3"} Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.724369 5070 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"91a8a7efc0acce32b579fb842605e71f2d10df826e4376f880e4698d3a139ead"} Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.724379 5070 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ede503753ce6d96a69a99c666946f468b83ecde77313d8d29f62b978a7c99e41"} Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.724388 5070 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4455aebd8013b0ae551a376b4bfc7745c2744abf709223a6d9a975aa63b8da9c"} Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.724396 5070 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"16558a9632bfa96e34363fb4b5c94c8fdd5c08e0d462bffe09a7147d0212a7e4"} Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.724406 5070 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6f7f21f84fa1339e71e86a8fa1512888749a1ecf63fb88ac90242f55cbad2b58"} Dec 13 
03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.724414 5070 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2765090ae9ec41771df9f957049ea3af811f7a0ab5c2a5bce66e198101d8a15c"} Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.724424 5070 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"941e50e8345cec7c54b5ea9522c2d9e024167e081dc86bc28f5d6ff032614c5f"} Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.724433 5070 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d5d76fb1d00b41d6c27f84434b50a1211c98aa213bba1cb7988b2c35d33f07f3"} Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.724411 5070 scope.go:117] "RemoveContainer" containerID="91a8a7efc0acce32b579fb842605e71f2d10df826e4376f880e4698d3a139ead" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.724465 5070 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"dfd5f39ce1d4e9d62672946aad6fb238ea657dbee23811712271177505bc948f"} Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.724647 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7pmp8" event={"ID":"65d45c3c-cac7-4578-b0b5-05e546d8e356","Type":"ContainerDied","Data":"b0410c0d7afa2272cf891cf4ca13aeb146961247c1674a992bb8963207013d18"} Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.724885 5070 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"91a8a7efc0acce32b579fb842605e71f2d10df826e4376f880e4698d3a139ead"} Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.724901 5070 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ede503753ce6d96a69a99c666946f468b83ecde77313d8d29f62b978a7c99e41"} Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.724915 5070 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4455aebd8013b0ae551a376b4bfc7745c2744abf709223a6d9a975aa63b8da9c"} Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.724924 5070 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"16558a9632bfa96e34363fb4b5c94c8fdd5c08e0d462bffe09a7147d0212a7e4"} Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.724932 5070 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6f7f21f84fa1339e71e86a8fa1512888749a1ecf63fb88ac90242f55cbad2b58"} Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.724939 5070 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2765090ae9ec41771df9f957049ea3af811f7a0ab5c2a5bce66e198101d8a15c"} Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.724947 5070 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"941e50e8345cec7c54b5ea9522c2d9e024167e081dc86bc28f5d6ff032614c5f"} Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.724954 5070 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d5d76fb1d00b41d6c27f84434b50a1211c98aa213bba1cb7988b2c35d33f07f3"} Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.724960 5070 
pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"dfd5f39ce1d4e9d62672946aad6fb238ea657dbee23811712271177505bc948f"} Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.731192 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-c4c69_7ecb3a4a-4966-4cd1-bf07-aec91cf4212e/kube-multus/0.log" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.731255 5070 generic.go:334] "Generic (PLEG): container finished" podID="7ecb3a4a-4966-4cd1-bf07-aec91cf4212e" containerID="dd65753d5cfdc23519bf1aade864ce59e3d4ac4a3a3751c963e4ba0103a418aa" exitCode=2 Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.731300 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-c4c69" event={"ID":"7ecb3a4a-4966-4cd1-bf07-aec91cf4212e","Type":"ContainerDied","Data":"dd65753d5cfdc23519bf1aade864ce59e3d4ac4a3a3751c963e4ba0103a418aa"} Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.731888 5070 scope.go:117] "RemoveContainer" containerID="dd65753d5cfdc23519bf1aade864ce59e3d4ac4a3a3751c963e4ba0103a418aa" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.756530 5070 scope.go:117] "RemoveContainer" containerID="ede503753ce6d96a69a99c666946f468b83ecde77313d8d29f62b978a7c99e41" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.803673 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-7pmp8"] Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.806116 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-7pmp8"] Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.821723 5070 scope.go:117] "RemoveContainer" containerID="4455aebd8013b0ae551a376b4bfc7745c2744abf709223a6d9a975aa63b8da9c" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.859488 5070 scope.go:117] "RemoveContainer" containerID="16558a9632bfa96e34363fb4b5c94c8fdd5c08e0d462bffe09a7147d0212a7e4" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.877491 5070 scope.go:117] "RemoveContainer" containerID="6f7f21f84fa1339e71e86a8fa1512888749a1ecf63fb88ac90242f55cbad2b58" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.890131 5070 scope.go:117] "RemoveContainer" containerID="2765090ae9ec41771df9f957049ea3af811f7a0ab5c2a5bce66e198101d8a15c" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.902178 5070 scope.go:117] "RemoveContainer" containerID="941e50e8345cec7c54b5ea9522c2d9e024167e081dc86bc28f5d6ff032614c5f" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.932781 5070 scope.go:117] "RemoveContainer" containerID="d5d76fb1d00b41d6c27f84434b50a1211c98aa213bba1cb7988b2c35d33f07f3" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.946301 5070 scope.go:117] "RemoveContainer" containerID="dfd5f39ce1d4e9d62672946aad6fb238ea657dbee23811712271177505bc948f" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.974836 5070 scope.go:117] "RemoveContainer" containerID="91a8a7efc0acce32b579fb842605e71f2d10df826e4376f880e4698d3a139ead" Dec 13 03:21:59 crc kubenswrapper[5070]: E1213 03:21:59.975301 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"91a8a7efc0acce32b579fb842605e71f2d10df826e4376f880e4698d3a139ead\": container with ID starting with 91a8a7efc0acce32b579fb842605e71f2d10df826e4376f880e4698d3a139ead not found: ID does not exist" containerID="91a8a7efc0acce32b579fb842605e71f2d10df826e4376f880e4698d3a139ead" Dec 13 
03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.975337 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"91a8a7efc0acce32b579fb842605e71f2d10df826e4376f880e4698d3a139ead"} err="failed to get container status \"91a8a7efc0acce32b579fb842605e71f2d10df826e4376f880e4698d3a139ead\": rpc error: code = NotFound desc = could not find container \"91a8a7efc0acce32b579fb842605e71f2d10df826e4376f880e4698d3a139ead\": container with ID starting with 91a8a7efc0acce32b579fb842605e71f2d10df826e4376f880e4698d3a139ead not found: ID does not exist" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.975364 5070 scope.go:117] "RemoveContainer" containerID="ede503753ce6d96a69a99c666946f468b83ecde77313d8d29f62b978a7c99e41" Dec 13 03:21:59 crc kubenswrapper[5070]: E1213 03:21:59.975768 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ede503753ce6d96a69a99c666946f468b83ecde77313d8d29f62b978a7c99e41\": container with ID starting with ede503753ce6d96a69a99c666946f468b83ecde77313d8d29f62b978a7c99e41 not found: ID does not exist" containerID="ede503753ce6d96a69a99c666946f468b83ecde77313d8d29f62b978a7c99e41" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.975806 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ede503753ce6d96a69a99c666946f468b83ecde77313d8d29f62b978a7c99e41"} err="failed to get container status \"ede503753ce6d96a69a99c666946f468b83ecde77313d8d29f62b978a7c99e41\": rpc error: code = NotFound desc = could not find container \"ede503753ce6d96a69a99c666946f468b83ecde77313d8d29f62b978a7c99e41\": container with ID starting with ede503753ce6d96a69a99c666946f468b83ecde77313d8d29f62b978a7c99e41 not found: ID does not exist" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.975833 5070 scope.go:117] "RemoveContainer" containerID="4455aebd8013b0ae551a376b4bfc7745c2744abf709223a6d9a975aa63b8da9c" Dec 13 03:21:59 crc kubenswrapper[5070]: E1213 03:21:59.976221 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4455aebd8013b0ae551a376b4bfc7745c2744abf709223a6d9a975aa63b8da9c\": container with ID starting with 4455aebd8013b0ae551a376b4bfc7745c2744abf709223a6d9a975aa63b8da9c not found: ID does not exist" containerID="4455aebd8013b0ae551a376b4bfc7745c2744abf709223a6d9a975aa63b8da9c" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.976260 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4455aebd8013b0ae551a376b4bfc7745c2744abf709223a6d9a975aa63b8da9c"} err="failed to get container status \"4455aebd8013b0ae551a376b4bfc7745c2744abf709223a6d9a975aa63b8da9c\": rpc error: code = NotFound desc = could not find container \"4455aebd8013b0ae551a376b4bfc7745c2744abf709223a6d9a975aa63b8da9c\": container with ID starting with 4455aebd8013b0ae551a376b4bfc7745c2744abf709223a6d9a975aa63b8da9c not found: ID does not exist" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.976291 5070 scope.go:117] "RemoveContainer" containerID="16558a9632bfa96e34363fb4b5c94c8fdd5c08e0d462bffe09a7147d0212a7e4" Dec 13 03:21:59 crc kubenswrapper[5070]: E1213 03:21:59.976608 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"16558a9632bfa96e34363fb4b5c94c8fdd5c08e0d462bffe09a7147d0212a7e4\": container with ID starting with 
16558a9632bfa96e34363fb4b5c94c8fdd5c08e0d462bffe09a7147d0212a7e4 not found: ID does not exist" containerID="16558a9632bfa96e34363fb4b5c94c8fdd5c08e0d462bffe09a7147d0212a7e4" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.976632 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"16558a9632bfa96e34363fb4b5c94c8fdd5c08e0d462bffe09a7147d0212a7e4"} err="failed to get container status \"16558a9632bfa96e34363fb4b5c94c8fdd5c08e0d462bffe09a7147d0212a7e4\": rpc error: code = NotFound desc = could not find container \"16558a9632bfa96e34363fb4b5c94c8fdd5c08e0d462bffe09a7147d0212a7e4\": container with ID starting with 16558a9632bfa96e34363fb4b5c94c8fdd5c08e0d462bffe09a7147d0212a7e4 not found: ID does not exist" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.976655 5070 scope.go:117] "RemoveContainer" containerID="6f7f21f84fa1339e71e86a8fa1512888749a1ecf63fb88ac90242f55cbad2b58" Dec 13 03:21:59 crc kubenswrapper[5070]: E1213 03:21:59.977030 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6f7f21f84fa1339e71e86a8fa1512888749a1ecf63fb88ac90242f55cbad2b58\": container with ID starting with 6f7f21f84fa1339e71e86a8fa1512888749a1ecf63fb88ac90242f55cbad2b58 not found: ID does not exist" containerID="6f7f21f84fa1339e71e86a8fa1512888749a1ecf63fb88ac90242f55cbad2b58" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.977057 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6f7f21f84fa1339e71e86a8fa1512888749a1ecf63fb88ac90242f55cbad2b58"} err="failed to get container status \"6f7f21f84fa1339e71e86a8fa1512888749a1ecf63fb88ac90242f55cbad2b58\": rpc error: code = NotFound desc = could not find container \"6f7f21f84fa1339e71e86a8fa1512888749a1ecf63fb88ac90242f55cbad2b58\": container with ID starting with 6f7f21f84fa1339e71e86a8fa1512888749a1ecf63fb88ac90242f55cbad2b58 not found: ID does not exist" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.977071 5070 scope.go:117] "RemoveContainer" containerID="2765090ae9ec41771df9f957049ea3af811f7a0ab5c2a5bce66e198101d8a15c" Dec 13 03:21:59 crc kubenswrapper[5070]: E1213 03:21:59.977339 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2765090ae9ec41771df9f957049ea3af811f7a0ab5c2a5bce66e198101d8a15c\": container with ID starting with 2765090ae9ec41771df9f957049ea3af811f7a0ab5c2a5bce66e198101d8a15c not found: ID does not exist" containerID="2765090ae9ec41771df9f957049ea3af811f7a0ab5c2a5bce66e198101d8a15c" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.977366 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2765090ae9ec41771df9f957049ea3af811f7a0ab5c2a5bce66e198101d8a15c"} err="failed to get container status \"2765090ae9ec41771df9f957049ea3af811f7a0ab5c2a5bce66e198101d8a15c\": rpc error: code = NotFound desc = could not find container \"2765090ae9ec41771df9f957049ea3af811f7a0ab5c2a5bce66e198101d8a15c\": container with ID starting with 2765090ae9ec41771df9f957049ea3af811f7a0ab5c2a5bce66e198101d8a15c not found: ID does not exist" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.977379 5070 scope.go:117] "RemoveContainer" containerID="941e50e8345cec7c54b5ea9522c2d9e024167e081dc86bc28f5d6ff032614c5f" Dec 13 03:21:59 crc kubenswrapper[5070]: E1213 03:21:59.977778 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc 
error: code = NotFound desc = could not find container \"941e50e8345cec7c54b5ea9522c2d9e024167e081dc86bc28f5d6ff032614c5f\": container with ID starting with 941e50e8345cec7c54b5ea9522c2d9e024167e081dc86bc28f5d6ff032614c5f not found: ID does not exist" containerID="941e50e8345cec7c54b5ea9522c2d9e024167e081dc86bc28f5d6ff032614c5f" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.977796 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"941e50e8345cec7c54b5ea9522c2d9e024167e081dc86bc28f5d6ff032614c5f"} err="failed to get container status \"941e50e8345cec7c54b5ea9522c2d9e024167e081dc86bc28f5d6ff032614c5f\": rpc error: code = NotFound desc = could not find container \"941e50e8345cec7c54b5ea9522c2d9e024167e081dc86bc28f5d6ff032614c5f\": container with ID starting with 941e50e8345cec7c54b5ea9522c2d9e024167e081dc86bc28f5d6ff032614c5f not found: ID does not exist" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.977807 5070 scope.go:117] "RemoveContainer" containerID="d5d76fb1d00b41d6c27f84434b50a1211c98aa213bba1cb7988b2c35d33f07f3" Dec 13 03:21:59 crc kubenswrapper[5070]: E1213 03:21:59.978174 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d5d76fb1d00b41d6c27f84434b50a1211c98aa213bba1cb7988b2c35d33f07f3\": container with ID starting with d5d76fb1d00b41d6c27f84434b50a1211c98aa213bba1cb7988b2c35d33f07f3 not found: ID does not exist" containerID="d5d76fb1d00b41d6c27f84434b50a1211c98aa213bba1cb7988b2c35d33f07f3" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.978193 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d5d76fb1d00b41d6c27f84434b50a1211c98aa213bba1cb7988b2c35d33f07f3"} err="failed to get container status \"d5d76fb1d00b41d6c27f84434b50a1211c98aa213bba1cb7988b2c35d33f07f3\": rpc error: code = NotFound desc = could not find container \"d5d76fb1d00b41d6c27f84434b50a1211c98aa213bba1cb7988b2c35d33f07f3\": container with ID starting with d5d76fb1d00b41d6c27f84434b50a1211c98aa213bba1cb7988b2c35d33f07f3 not found: ID does not exist" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.978205 5070 scope.go:117] "RemoveContainer" containerID="dfd5f39ce1d4e9d62672946aad6fb238ea657dbee23811712271177505bc948f" Dec 13 03:21:59 crc kubenswrapper[5070]: E1213 03:21:59.978467 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dfd5f39ce1d4e9d62672946aad6fb238ea657dbee23811712271177505bc948f\": container with ID starting with dfd5f39ce1d4e9d62672946aad6fb238ea657dbee23811712271177505bc948f not found: ID does not exist" containerID="dfd5f39ce1d4e9d62672946aad6fb238ea657dbee23811712271177505bc948f" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.978486 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dfd5f39ce1d4e9d62672946aad6fb238ea657dbee23811712271177505bc948f"} err="failed to get container status \"dfd5f39ce1d4e9d62672946aad6fb238ea657dbee23811712271177505bc948f\": rpc error: code = NotFound desc = could not find container \"dfd5f39ce1d4e9d62672946aad6fb238ea657dbee23811712271177505bc948f\": container with ID starting with dfd5f39ce1d4e9d62672946aad6fb238ea657dbee23811712271177505bc948f not found: ID does not exist" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.978498 5070 scope.go:117] "RemoveContainer" 
containerID="91a8a7efc0acce32b579fb842605e71f2d10df826e4376f880e4698d3a139ead" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.978705 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"91a8a7efc0acce32b579fb842605e71f2d10df826e4376f880e4698d3a139ead"} err="failed to get container status \"91a8a7efc0acce32b579fb842605e71f2d10df826e4376f880e4698d3a139ead\": rpc error: code = NotFound desc = could not find container \"91a8a7efc0acce32b579fb842605e71f2d10df826e4376f880e4698d3a139ead\": container with ID starting with 91a8a7efc0acce32b579fb842605e71f2d10df826e4376f880e4698d3a139ead not found: ID does not exist" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.978724 5070 scope.go:117] "RemoveContainer" containerID="ede503753ce6d96a69a99c666946f468b83ecde77313d8d29f62b978a7c99e41" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.979027 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ede503753ce6d96a69a99c666946f468b83ecde77313d8d29f62b978a7c99e41"} err="failed to get container status \"ede503753ce6d96a69a99c666946f468b83ecde77313d8d29f62b978a7c99e41\": rpc error: code = NotFound desc = could not find container \"ede503753ce6d96a69a99c666946f468b83ecde77313d8d29f62b978a7c99e41\": container with ID starting with ede503753ce6d96a69a99c666946f468b83ecde77313d8d29f62b978a7c99e41 not found: ID does not exist" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.979044 5070 scope.go:117] "RemoveContainer" containerID="4455aebd8013b0ae551a376b4bfc7745c2744abf709223a6d9a975aa63b8da9c" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.979278 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4455aebd8013b0ae551a376b4bfc7745c2744abf709223a6d9a975aa63b8da9c"} err="failed to get container status \"4455aebd8013b0ae551a376b4bfc7745c2744abf709223a6d9a975aa63b8da9c\": rpc error: code = NotFound desc = could not find container \"4455aebd8013b0ae551a376b4bfc7745c2744abf709223a6d9a975aa63b8da9c\": container with ID starting with 4455aebd8013b0ae551a376b4bfc7745c2744abf709223a6d9a975aa63b8da9c not found: ID does not exist" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.979293 5070 scope.go:117] "RemoveContainer" containerID="16558a9632bfa96e34363fb4b5c94c8fdd5c08e0d462bffe09a7147d0212a7e4" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.979818 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"16558a9632bfa96e34363fb4b5c94c8fdd5c08e0d462bffe09a7147d0212a7e4"} err="failed to get container status \"16558a9632bfa96e34363fb4b5c94c8fdd5c08e0d462bffe09a7147d0212a7e4\": rpc error: code = NotFound desc = could not find container \"16558a9632bfa96e34363fb4b5c94c8fdd5c08e0d462bffe09a7147d0212a7e4\": container with ID starting with 16558a9632bfa96e34363fb4b5c94c8fdd5c08e0d462bffe09a7147d0212a7e4 not found: ID does not exist" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.979861 5070 scope.go:117] "RemoveContainer" containerID="6f7f21f84fa1339e71e86a8fa1512888749a1ecf63fb88ac90242f55cbad2b58" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.980130 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6f7f21f84fa1339e71e86a8fa1512888749a1ecf63fb88ac90242f55cbad2b58"} err="failed to get container status \"6f7f21f84fa1339e71e86a8fa1512888749a1ecf63fb88ac90242f55cbad2b58\": rpc error: code = NotFound desc = could not find 
container \"6f7f21f84fa1339e71e86a8fa1512888749a1ecf63fb88ac90242f55cbad2b58\": container with ID starting with 6f7f21f84fa1339e71e86a8fa1512888749a1ecf63fb88ac90242f55cbad2b58 not found: ID does not exist" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.980149 5070 scope.go:117] "RemoveContainer" containerID="2765090ae9ec41771df9f957049ea3af811f7a0ab5c2a5bce66e198101d8a15c" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.980427 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2765090ae9ec41771df9f957049ea3af811f7a0ab5c2a5bce66e198101d8a15c"} err="failed to get container status \"2765090ae9ec41771df9f957049ea3af811f7a0ab5c2a5bce66e198101d8a15c\": rpc error: code = NotFound desc = could not find container \"2765090ae9ec41771df9f957049ea3af811f7a0ab5c2a5bce66e198101d8a15c\": container with ID starting with 2765090ae9ec41771df9f957049ea3af811f7a0ab5c2a5bce66e198101d8a15c not found: ID does not exist" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.980457 5070 scope.go:117] "RemoveContainer" containerID="941e50e8345cec7c54b5ea9522c2d9e024167e081dc86bc28f5d6ff032614c5f" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.980700 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"941e50e8345cec7c54b5ea9522c2d9e024167e081dc86bc28f5d6ff032614c5f"} err="failed to get container status \"941e50e8345cec7c54b5ea9522c2d9e024167e081dc86bc28f5d6ff032614c5f\": rpc error: code = NotFound desc = could not find container \"941e50e8345cec7c54b5ea9522c2d9e024167e081dc86bc28f5d6ff032614c5f\": container with ID starting with 941e50e8345cec7c54b5ea9522c2d9e024167e081dc86bc28f5d6ff032614c5f not found: ID does not exist" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.980749 5070 scope.go:117] "RemoveContainer" containerID="d5d76fb1d00b41d6c27f84434b50a1211c98aa213bba1cb7988b2c35d33f07f3" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.980978 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d5d76fb1d00b41d6c27f84434b50a1211c98aa213bba1cb7988b2c35d33f07f3"} err="failed to get container status \"d5d76fb1d00b41d6c27f84434b50a1211c98aa213bba1cb7988b2c35d33f07f3\": rpc error: code = NotFound desc = could not find container \"d5d76fb1d00b41d6c27f84434b50a1211c98aa213bba1cb7988b2c35d33f07f3\": container with ID starting with d5d76fb1d00b41d6c27f84434b50a1211c98aa213bba1cb7988b2c35d33f07f3 not found: ID does not exist" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.980997 5070 scope.go:117] "RemoveContainer" containerID="dfd5f39ce1d4e9d62672946aad6fb238ea657dbee23811712271177505bc948f" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.981316 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dfd5f39ce1d4e9d62672946aad6fb238ea657dbee23811712271177505bc948f"} err="failed to get container status \"dfd5f39ce1d4e9d62672946aad6fb238ea657dbee23811712271177505bc948f\": rpc error: code = NotFound desc = could not find container \"dfd5f39ce1d4e9d62672946aad6fb238ea657dbee23811712271177505bc948f\": container with ID starting with dfd5f39ce1d4e9d62672946aad6fb238ea657dbee23811712271177505bc948f not found: ID does not exist" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.981334 5070 scope.go:117] "RemoveContainer" containerID="91a8a7efc0acce32b579fb842605e71f2d10df826e4376f880e4698d3a139ead" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.981585 5070 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"91a8a7efc0acce32b579fb842605e71f2d10df826e4376f880e4698d3a139ead"} err="failed to get container status \"91a8a7efc0acce32b579fb842605e71f2d10df826e4376f880e4698d3a139ead\": rpc error: code = NotFound desc = could not find container \"91a8a7efc0acce32b579fb842605e71f2d10df826e4376f880e4698d3a139ead\": container with ID starting with 91a8a7efc0acce32b579fb842605e71f2d10df826e4376f880e4698d3a139ead not found: ID does not exist" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.981605 5070 scope.go:117] "RemoveContainer" containerID="ede503753ce6d96a69a99c666946f468b83ecde77313d8d29f62b978a7c99e41" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.981811 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ede503753ce6d96a69a99c666946f468b83ecde77313d8d29f62b978a7c99e41"} err="failed to get container status \"ede503753ce6d96a69a99c666946f468b83ecde77313d8d29f62b978a7c99e41\": rpc error: code = NotFound desc = could not find container \"ede503753ce6d96a69a99c666946f468b83ecde77313d8d29f62b978a7c99e41\": container with ID starting with ede503753ce6d96a69a99c666946f468b83ecde77313d8d29f62b978a7c99e41 not found: ID does not exist" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.981829 5070 scope.go:117] "RemoveContainer" containerID="4455aebd8013b0ae551a376b4bfc7745c2744abf709223a6d9a975aa63b8da9c" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.982032 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4455aebd8013b0ae551a376b4bfc7745c2744abf709223a6d9a975aa63b8da9c"} err="failed to get container status \"4455aebd8013b0ae551a376b4bfc7745c2744abf709223a6d9a975aa63b8da9c\": rpc error: code = NotFound desc = could not find container \"4455aebd8013b0ae551a376b4bfc7745c2744abf709223a6d9a975aa63b8da9c\": container with ID starting with 4455aebd8013b0ae551a376b4bfc7745c2744abf709223a6d9a975aa63b8da9c not found: ID does not exist" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.982051 5070 scope.go:117] "RemoveContainer" containerID="16558a9632bfa96e34363fb4b5c94c8fdd5c08e0d462bffe09a7147d0212a7e4" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.982357 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"16558a9632bfa96e34363fb4b5c94c8fdd5c08e0d462bffe09a7147d0212a7e4"} err="failed to get container status \"16558a9632bfa96e34363fb4b5c94c8fdd5c08e0d462bffe09a7147d0212a7e4\": rpc error: code = NotFound desc = could not find container \"16558a9632bfa96e34363fb4b5c94c8fdd5c08e0d462bffe09a7147d0212a7e4\": container with ID starting with 16558a9632bfa96e34363fb4b5c94c8fdd5c08e0d462bffe09a7147d0212a7e4 not found: ID does not exist" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.982402 5070 scope.go:117] "RemoveContainer" containerID="6f7f21f84fa1339e71e86a8fa1512888749a1ecf63fb88ac90242f55cbad2b58" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.982674 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6f7f21f84fa1339e71e86a8fa1512888749a1ecf63fb88ac90242f55cbad2b58"} err="failed to get container status \"6f7f21f84fa1339e71e86a8fa1512888749a1ecf63fb88ac90242f55cbad2b58\": rpc error: code = NotFound desc = could not find container \"6f7f21f84fa1339e71e86a8fa1512888749a1ecf63fb88ac90242f55cbad2b58\": container with ID starting with 
6f7f21f84fa1339e71e86a8fa1512888749a1ecf63fb88ac90242f55cbad2b58 not found: ID does not exist" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.982696 5070 scope.go:117] "RemoveContainer" containerID="2765090ae9ec41771df9f957049ea3af811f7a0ab5c2a5bce66e198101d8a15c" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.982951 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2765090ae9ec41771df9f957049ea3af811f7a0ab5c2a5bce66e198101d8a15c"} err="failed to get container status \"2765090ae9ec41771df9f957049ea3af811f7a0ab5c2a5bce66e198101d8a15c\": rpc error: code = NotFound desc = could not find container \"2765090ae9ec41771df9f957049ea3af811f7a0ab5c2a5bce66e198101d8a15c\": container with ID starting with 2765090ae9ec41771df9f957049ea3af811f7a0ab5c2a5bce66e198101d8a15c not found: ID does not exist" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.982976 5070 scope.go:117] "RemoveContainer" containerID="941e50e8345cec7c54b5ea9522c2d9e024167e081dc86bc28f5d6ff032614c5f" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.983355 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"941e50e8345cec7c54b5ea9522c2d9e024167e081dc86bc28f5d6ff032614c5f"} err="failed to get container status \"941e50e8345cec7c54b5ea9522c2d9e024167e081dc86bc28f5d6ff032614c5f\": rpc error: code = NotFound desc = could not find container \"941e50e8345cec7c54b5ea9522c2d9e024167e081dc86bc28f5d6ff032614c5f\": container with ID starting with 941e50e8345cec7c54b5ea9522c2d9e024167e081dc86bc28f5d6ff032614c5f not found: ID does not exist" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.983381 5070 scope.go:117] "RemoveContainer" containerID="d5d76fb1d00b41d6c27f84434b50a1211c98aa213bba1cb7988b2c35d33f07f3" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.983647 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d5d76fb1d00b41d6c27f84434b50a1211c98aa213bba1cb7988b2c35d33f07f3"} err="failed to get container status \"d5d76fb1d00b41d6c27f84434b50a1211c98aa213bba1cb7988b2c35d33f07f3\": rpc error: code = NotFound desc = could not find container \"d5d76fb1d00b41d6c27f84434b50a1211c98aa213bba1cb7988b2c35d33f07f3\": container with ID starting with d5d76fb1d00b41d6c27f84434b50a1211c98aa213bba1cb7988b2c35d33f07f3 not found: ID does not exist" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.983670 5070 scope.go:117] "RemoveContainer" containerID="dfd5f39ce1d4e9d62672946aad6fb238ea657dbee23811712271177505bc948f" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.983910 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dfd5f39ce1d4e9d62672946aad6fb238ea657dbee23811712271177505bc948f"} err="failed to get container status \"dfd5f39ce1d4e9d62672946aad6fb238ea657dbee23811712271177505bc948f\": rpc error: code = NotFound desc = could not find container \"dfd5f39ce1d4e9d62672946aad6fb238ea657dbee23811712271177505bc948f\": container with ID starting with dfd5f39ce1d4e9d62672946aad6fb238ea657dbee23811712271177505bc948f not found: ID does not exist" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.983931 5070 scope.go:117] "RemoveContainer" containerID="91a8a7efc0acce32b579fb842605e71f2d10df826e4376f880e4698d3a139ead" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.984170 5070 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"91a8a7efc0acce32b579fb842605e71f2d10df826e4376f880e4698d3a139ead"} err="failed to get container status \"91a8a7efc0acce32b579fb842605e71f2d10df826e4376f880e4698d3a139ead\": rpc error: code = NotFound desc = could not find container \"91a8a7efc0acce32b579fb842605e71f2d10df826e4376f880e4698d3a139ead\": container with ID starting with 91a8a7efc0acce32b579fb842605e71f2d10df826e4376f880e4698d3a139ead not found: ID does not exist" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.984208 5070 scope.go:117] "RemoveContainer" containerID="ede503753ce6d96a69a99c666946f468b83ecde77313d8d29f62b978a7c99e41" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.984476 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ede503753ce6d96a69a99c666946f468b83ecde77313d8d29f62b978a7c99e41"} err="failed to get container status \"ede503753ce6d96a69a99c666946f468b83ecde77313d8d29f62b978a7c99e41\": rpc error: code = NotFound desc = could not find container \"ede503753ce6d96a69a99c666946f468b83ecde77313d8d29f62b978a7c99e41\": container with ID starting with ede503753ce6d96a69a99c666946f468b83ecde77313d8d29f62b978a7c99e41 not found: ID does not exist" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.984507 5070 scope.go:117] "RemoveContainer" containerID="4455aebd8013b0ae551a376b4bfc7745c2744abf709223a6d9a975aa63b8da9c" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.984794 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4455aebd8013b0ae551a376b4bfc7745c2744abf709223a6d9a975aa63b8da9c"} err="failed to get container status \"4455aebd8013b0ae551a376b4bfc7745c2744abf709223a6d9a975aa63b8da9c\": rpc error: code = NotFound desc = could not find container \"4455aebd8013b0ae551a376b4bfc7745c2744abf709223a6d9a975aa63b8da9c\": container with ID starting with 4455aebd8013b0ae551a376b4bfc7745c2744abf709223a6d9a975aa63b8da9c not found: ID does not exist" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.984815 5070 scope.go:117] "RemoveContainer" containerID="16558a9632bfa96e34363fb4b5c94c8fdd5c08e0d462bffe09a7147d0212a7e4" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.985064 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"16558a9632bfa96e34363fb4b5c94c8fdd5c08e0d462bffe09a7147d0212a7e4"} err="failed to get container status \"16558a9632bfa96e34363fb4b5c94c8fdd5c08e0d462bffe09a7147d0212a7e4\": rpc error: code = NotFound desc = could not find container \"16558a9632bfa96e34363fb4b5c94c8fdd5c08e0d462bffe09a7147d0212a7e4\": container with ID starting with 16558a9632bfa96e34363fb4b5c94c8fdd5c08e0d462bffe09a7147d0212a7e4 not found: ID does not exist" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.985087 5070 scope.go:117] "RemoveContainer" containerID="6f7f21f84fa1339e71e86a8fa1512888749a1ecf63fb88ac90242f55cbad2b58" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.985433 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6f7f21f84fa1339e71e86a8fa1512888749a1ecf63fb88ac90242f55cbad2b58"} err="failed to get container status \"6f7f21f84fa1339e71e86a8fa1512888749a1ecf63fb88ac90242f55cbad2b58\": rpc error: code = NotFound desc = could not find container \"6f7f21f84fa1339e71e86a8fa1512888749a1ecf63fb88ac90242f55cbad2b58\": container with ID starting with 6f7f21f84fa1339e71e86a8fa1512888749a1ecf63fb88ac90242f55cbad2b58 not found: ID does not exist" Dec 
13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.985483 5070 scope.go:117] "RemoveContainer" containerID="2765090ae9ec41771df9f957049ea3af811f7a0ab5c2a5bce66e198101d8a15c" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.985775 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2765090ae9ec41771df9f957049ea3af811f7a0ab5c2a5bce66e198101d8a15c"} err="failed to get container status \"2765090ae9ec41771df9f957049ea3af811f7a0ab5c2a5bce66e198101d8a15c\": rpc error: code = NotFound desc = could not find container \"2765090ae9ec41771df9f957049ea3af811f7a0ab5c2a5bce66e198101d8a15c\": container with ID starting with 2765090ae9ec41771df9f957049ea3af811f7a0ab5c2a5bce66e198101d8a15c not found: ID does not exist" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.985793 5070 scope.go:117] "RemoveContainer" containerID="941e50e8345cec7c54b5ea9522c2d9e024167e081dc86bc28f5d6ff032614c5f" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.986158 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"941e50e8345cec7c54b5ea9522c2d9e024167e081dc86bc28f5d6ff032614c5f"} err="failed to get container status \"941e50e8345cec7c54b5ea9522c2d9e024167e081dc86bc28f5d6ff032614c5f\": rpc error: code = NotFound desc = could not find container \"941e50e8345cec7c54b5ea9522c2d9e024167e081dc86bc28f5d6ff032614c5f\": container with ID starting with 941e50e8345cec7c54b5ea9522c2d9e024167e081dc86bc28f5d6ff032614c5f not found: ID does not exist" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.986186 5070 scope.go:117] "RemoveContainer" containerID="d5d76fb1d00b41d6c27f84434b50a1211c98aa213bba1cb7988b2c35d33f07f3" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.986491 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d5d76fb1d00b41d6c27f84434b50a1211c98aa213bba1cb7988b2c35d33f07f3"} err="failed to get container status \"d5d76fb1d00b41d6c27f84434b50a1211c98aa213bba1cb7988b2c35d33f07f3\": rpc error: code = NotFound desc = could not find container \"d5d76fb1d00b41d6c27f84434b50a1211c98aa213bba1cb7988b2c35d33f07f3\": container with ID starting with d5d76fb1d00b41d6c27f84434b50a1211c98aa213bba1cb7988b2c35d33f07f3 not found: ID does not exist" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.986511 5070 scope.go:117] "RemoveContainer" containerID="dfd5f39ce1d4e9d62672946aad6fb238ea657dbee23811712271177505bc948f" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.986847 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dfd5f39ce1d4e9d62672946aad6fb238ea657dbee23811712271177505bc948f"} err="failed to get container status \"dfd5f39ce1d4e9d62672946aad6fb238ea657dbee23811712271177505bc948f\": rpc error: code = NotFound desc = could not find container \"dfd5f39ce1d4e9d62672946aad6fb238ea657dbee23811712271177505bc948f\": container with ID starting with dfd5f39ce1d4e9d62672946aad6fb238ea657dbee23811712271177505bc948f not found: ID does not exist" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.986926 5070 scope.go:117] "RemoveContainer" containerID="91a8a7efc0acce32b579fb842605e71f2d10df826e4376f880e4698d3a139ead" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.987222 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"91a8a7efc0acce32b579fb842605e71f2d10df826e4376f880e4698d3a139ead"} err="failed to get container status 
\"91a8a7efc0acce32b579fb842605e71f2d10df826e4376f880e4698d3a139ead\": rpc error: code = NotFound desc = could not find container \"91a8a7efc0acce32b579fb842605e71f2d10df826e4376f880e4698d3a139ead\": container with ID starting with 91a8a7efc0acce32b579fb842605e71f2d10df826e4376f880e4698d3a139ead not found: ID does not exist" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.987243 5070 scope.go:117] "RemoveContainer" containerID="ede503753ce6d96a69a99c666946f468b83ecde77313d8d29f62b978a7c99e41" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.987555 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ede503753ce6d96a69a99c666946f468b83ecde77313d8d29f62b978a7c99e41"} err="failed to get container status \"ede503753ce6d96a69a99c666946f468b83ecde77313d8d29f62b978a7c99e41\": rpc error: code = NotFound desc = could not find container \"ede503753ce6d96a69a99c666946f468b83ecde77313d8d29f62b978a7c99e41\": container with ID starting with ede503753ce6d96a69a99c666946f468b83ecde77313d8d29f62b978a7c99e41 not found: ID does not exist" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.987591 5070 scope.go:117] "RemoveContainer" containerID="4455aebd8013b0ae551a376b4bfc7745c2744abf709223a6d9a975aa63b8da9c" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.987843 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4455aebd8013b0ae551a376b4bfc7745c2744abf709223a6d9a975aa63b8da9c"} err="failed to get container status \"4455aebd8013b0ae551a376b4bfc7745c2744abf709223a6d9a975aa63b8da9c\": rpc error: code = NotFound desc = could not find container \"4455aebd8013b0ae551a376b4bfc7745c2744abf709223a6d9a975aa63b8da9c\": container with ID starting with 4455aebd8013b0ae551a376b4bfc7745c2744abf709223a6d9a975aa63b8da9c not found: ID does not exist" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.987867 5070 scope.go:117] "RemoveContainer" containerID="16558a9632bfa96e34363fb4b5c94c8fdd5c08e0d462bffe09a7147d0212a7e4" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.988216 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"16558a9632bfa96e34363fb4b5c94c8fdd5c08e0d462bffe09a7147d0212a7e4"} err="failed to get container status \"16558a9632bfa96e34363fb4b5c94c8fdd5c08e0d462bffe09a7147d0212a7e4\": rpc error: code = NotFound desc = could not find container \"16558a9632bfa96e34363fb4b5c94c8fdd5c08e0d462bffe09a7147d0212a7e4\": container with ID starting with 16558a9632bfa96e34363fb4b5c94c8fdd5c08e0d462bffe09a7147d0212a7e4 not found: ID does not exist" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.988236 5070 scope.go:117] "RemoveContainer" containerID="6f7f21f84fa1339e71e86a8fa1512888749a1ecf63fb88ac90242f55cbad2b58" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.988477 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6f7f21f84fa1339e71e86a8fa1512888749a1ecf63fb88ac90242f55cbad2b58"} err="failed to get container status \"6f7f21f84fa1339e71e86a8fa1512888749a1ecf63fb88ac90242f55cbad2b58\": rpc error: code = NotFound desc = could not find container \"6f7f21f84fa1339e71e86a8fa1512888749a1ecf63fb88ac90242f55cbad2b58\": container with ID starting with 6f7f21f84fa1339e71e86a8fa1512888749a1ecf63fb88ac90242f55cbad2b58 not found: ID does not exist" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.988496 5070 scope.go:117] "RemoveContainer" 
containerID="2765090ae9ec41771df9f957049ea3af811f7a0ab5c2a5bce66e198101d8a15c" Dec 13 03:21:59 crc kubenswrapper[5070]: I1213 03:21:59.988744 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2765090ae9ec41771df9f957049ea3af811f7a0ab5c2a5bce66e198101d8a15c"} err="failed to get container status \"2765090ae9ec41771df9f957049ea3af811f7a0ab5c2a5bce66e198101d8a15c\": rpc error: code = NotFound desc = could not find container \"2765090ae9ec41771df9f957049ea3af811f7a0ab5c2a5bce66e198101d8a15c\": container with ID starting with 2765090ae9ec41771df9f957049ea3af811f7a0ab5c2a5bce66e198101d8a15c not found: ID does not exist" Dec 13 03:22:00 crc kubenswrapper[5070]: I1213 03:22:00.181030 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="65d45c3c-cac7-4578-b0b5-05e546d8e356" path="/var/lib/kubelet/pods/65d45c3c-cac7-4578-b0b5-05e546d8e356/volumes" Dec 13 03:22:00 crc kubenswrapper[5070]: I1213 03:22:00.740550 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-c4c69_7ecb3a4a-4966-4cd1-bf07-aec91cf4212e/kube-multus/0.log" Dec 13 03:22:00 crc kubenswrapper[5070]: I1213 03:22:00.740633 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-c4c69" event={"ID":"7ecb3a4a-4966-4cd1-bf07-aec91cf4212e","Type":"ContainerStarted","Data":"5ca387cf9a4f1596eab80e99f95e89eeec1e7535e43a81fe022d64997150cd1e"} Dec 13 03:22:00 crc kubenswrapper[5070]: I1213 03:22:00.742616 5070 generic.go:334] "Generic (PLEG): container finished" podID="9de33417-6ba9-4c5c-a04d-7dab1f22d022" containerID="436f82bb5ccc57cd3e10e74163ce8cb298600c46755bd21f8c1b35327d2fddfe" exitCode=0 Dec 13 03:22:00 crc kubenswrapper[5070]: I1213 03:22:00.742679 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7qln4" event={"ID":"9de33417-6ba9-4c5c-a04d-7dab1f22d022","Type":"ContainerDied","Data":"436f82bb5ccc57cd3e10e74163ce8cb298600c46755bd21f8c1b35327d2fddfe"} Dec 13 03:22:01 crc kubenswrapper[5070]: I1213 03:22:01.753469 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7qln4" event={"ID":"9de33417-6ba9-4c5c-a04d-7dab1f22d022","Type":"ContainerStarted","Data":"b7f7a6727566815305418fe036c57e6a026979a5c2d2fc102d988b1e3b5cc8ab"} Dec 13 03:22:01 crc kubenswrapper[5070]: I1213 03:22:01.753848 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7qln4" event={"ID":"9de33417-6ba9-4c5c-a04d-7dab1f22d022","Type":"ContainerStarted","Data":"a754af89c84b84210fb7def49a4b9caf8e27fb4d198ef29b3e846c989ce54fda"} Dec 13 03:22:01 crc kubenswrapper[5070]: I1213 03:22:01.753869 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7qln4" event={"ID":"9de33417-6ba9-4c5c-a04d-7dab1f22d022","Type":"ContainerStarted","Data":"c3295e80ce543a4c5bcd614c8f33650de9426db5eaa28e90560d09a428a0eb22"} Dec 13 03:22:01 crc kubenswrapper[5070]: I1213 03:22:01.753885 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7qln4" event={"ID":"9de33417-6ba9-4c5c-a04d-7dab1f22d022","Type":"ContainerStarted","Data":"476e8c720b1face78b86d6cdc66766ef0fd015d3241219bd831f8b09eefd3ea5"} Dec 13 03:22:02 crc kubenswrapper[5070]: I1213 03:22:02.769131 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7qln4" 
event={"ID":"9de33417-6ba9-4c5c-a04d-7dab1f22d022","Type":"ContainerStarted","Data":"e191836cfe968c66673e9e8567ffc7f2b6502a89566ec0cf848a73407f8d1cd8"} Dec 13 03:22:02 crc kubenswrapper[5070]: I1213 03:22:02.769521 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7qln4" event={"ID":"9de33417-6ba9-4c5c-a04d-7dab1f22d022","Type":"ContainerStarted","Data":"8c7ce62ab30e4a9070332f8cb80a8be1a3dbbd3b67cf82195307c2f272dd190c"} Dec 13 03:22:03 crc kubenswrapper[5070]: I1213 03:22:03.017163 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-5655c58dd6-wjw2w" Dec 13 03:22:04 crc kubenswrapper[5070]: I1213 03:22:04.782372 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7qln4" event={"ID":"9de33417-6ba9-4c5c-a04d-7dab1f22d022","Type":"ContainerStarted","Data":"d280922e888a3c3dd60956a9e18d957c66bb247c08d7ea0724932cef879de9ec"} Dec 13 03:22:06 crc kubenswrapper[5070]: I1213 03:22:06.804855 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7qln4" event={"ID":"9de33417-6ba9-4c5c-a04d-7dab1f22d022","Type":"ContainerStarted","Data":"921fd3cd472f8a40bdea41a847aae2f4353198a0b3f6edc25b3a529a48efe2f3"} Dec 13 03:22:06 crc kubenswrapper[5070]: I1213 03:22:06.805819 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-7qln4" Dec 13 03:22:06 crc kubenswrapper[5070]: I1213 03:22:06.805939 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-7qln4" Dec 13 03:22:06 crc kubenswrapper[5070]: I1213 03:22:06.806050 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-7qln4" Dec 13 03:22:06 crc kubenswrapper[5070]: I1213 03:22:06.837657 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-7qln4" podStartSLOduration=7.837640058 podStartE2EDuration="7.837640058s" podCreationTimestamp="2025-12-13 03:21:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 03:22:06.834376279 +0000 UTC m=+619.070219825" watchObservedRunningTime="2025-12-13 03:22:06.837640058 +0000 UTC m=+619.073483604" Dec 13 03:22:06 crc kubenswrapper[5070]: I1213 03:22:06.839037 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-7qln4" Dec 13 03:22:06 crc kubenswrapper[5070]: I1213 03:22:06.843307 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-7qln4" Dec 13 03:22:21 crc kubenswrapper[5070]: I1213 03:22:21.943062 5070 patch_prober.go:28] interesting pod/machine-config-daemon-9l4rb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 13 03:22:21 crc kubenswrapper[5070]: I1213 03:22:21.943780 5070 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 13 
03:22:29 crc kubenswrapper[5070]: I1213 03:22:29.504853 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-7qln4" Dec 13 03:22:49 crc kubenswrapper[5070]: I1213 03:22:49.265730 5070 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Dec 13 03:22:51 crc kubenswrapper[5070]: I1213 03:22:51.223141 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8mvkgm"] Dec 13 03:22:51 crc kubenswrapper[5070]: I1213 03:22:51.224353 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8mvkgm" Dec 13 03:22:51 crc kubenswrapper[5070]: I1213 03:22:51.226255 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 13 03:22:51 crc kubenswrapper[5070]: I1213 03:22:51.233278 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8mvkgm"] Dec 13 03:22:51 crc kubenswrapper[5070]: I1213 03:22:51.261952 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lcmjm\" (UniqueName: \"kubernetes.io/projected/4461d5cb-115e-4a85-92e2-066539781b0c-kube-api-access-lcmjm\") pod \"98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8mvkgm\" (UID: \"4461d5cb-115e-4a85-92e2-066539781b0c\") " pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8mvkgm" Dec 13 03:22:51 crc kubenswrapper[5070]: I1213 03:22:51.261997 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/4461d5cb-115e-4a85-92e2-066539781b0c-util\") pod \"98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8mvkgm\" (UID: \"4461d5cb-115e-4a85-92e2-066539781b0c\") " pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8mvkgm" Dec 13 03:22:51 crc kubenswrapper[5070]: I1213 03:22:51.262086 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/4461d5cb-115e-4a85-92e2-066539781b0c-bundle\") pod \"98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8mvkgm\" (UID: \"4461d5cb-115e-4a85-92e2-066539781b0c\") " pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8mvkgm" Dec 13 03:22:51 crc kubenswrapper[5070]: I1213 03:22:51.363327 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/4461d5cb-115e-4a85-92e2-066539781b0c-bundle\") pod \"98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8mvkgm\" (UID: \"4461d5cb-115e-4a85-92e2-066539781b0c\") " pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8mvkgm" Dec 13 03:22:51 crc kubenswrapper[5070]: I1213 03:22:51.363399 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lcmjm\" (UniqueName: \"kubernetes.io/projected/4461d5cb-115e-4a85-92e2-066539781b0c-kube-api-access-lcmjm\") pod \"98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8mvkgm\" (UID: \"4461d5cb-115e-4a85-92e2-066539781b0c\") " 
pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8mvkgm" Dec 13 03:22:51 crc kubenswrapper[5070]: I1213 03:22:51.363421 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/4461d5cb-115e-4a85-92e2-066539781b0c-util\") pod \"98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8mvkgm\" (UID: \"4461d5cb-115e-4a85-92e2-066539781b0c\") " pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8mvkgm" Dec 13 03:22:51 crc kubenswrapper[5070]: I1213 03:22:51.363904 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/4461d5cb-115e-4a85-92e2-066539781b0c-util\") pod \"98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8mvkgm\" (UID: \"4461d5cb-115e-4a85-92e2-066539781b0c\") " pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8mvkgm" Dec 13 03:22:51 crc kubenswrapper[5070]: I1213 03:22:51.363971 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/4461d5cb-115e-4a85-92e2-066539781b0c-bundle\") pod \"98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8mvkgm\" (UID: \"4461d5cb-115e-4a85-92e2-066539781b0c\") " pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8mvkgm" Dec 13 03:22:51 crc kubenswrapper[5070]: I1213 03:22:51.383690 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lcmjm\" (UniqueName: \"kubernetes.io/projected/4461d5cb-115e-4a85-92e2-066539781b0c-kube-api-access-lcmjm\") pod \"98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8mvkgm\" (UID: \"4461d5cb-115e-4a85-92e2-066539781b0c\") " pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8mvkgm" Dec 13 03:22:51 crc kubenswrapper[5070]: I1213 03:22:51.542282 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8mvkgm" Dec 13 03:22:51 crc kubenswrapper[5070]: I1213 03:22:51.801322 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8mvkgm"] Dec 13 03:22:51 crc kubenswrapper[5070]: W1213 03:22:51.809677 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4461d5cb_115e_4a85_92e2_066539781b0c.slice/crio-20509c62f18a622ee5dbac047358898059b63af3007bcccd5aa55ebafe4d5145 WatchSource:0}: Error finding container 20509c62f18a622ee5dbac047358898059b63af3007bcccd5aa55ebafe4d5145: Status 404 returned error can't find the container with id 20509c62f18a622ee5dbac047358898059b63af3007bcccd5aa55ebafe4d5145 Dec 13 03:22:51 crc kubenswrapper[5070]: I1213 03:22:51.943251 5070 patch_prober.go:28] interesting pod/machine-config-daemon-9l4rb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 13 03:22:51 crc kubenswrapper[5070]: I1213 03:22:51.943310 5070 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 13 03:22:51 crc kubenswrapper[5070]: I1213 03:22:51.943352 5070 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" Dec 13 03:22:51 crc kubenswrapper[5070]: I1213 03:22:51.943905 5070 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"b5e0b24c2b047322a8bb0e293689c3f0d2f12b10fd38f01969f32e9cc47932b2"} pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 13 03:22:51 crc kubenswrapper[5070]: I1213 03:22:51.943953 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" containerName="machine-config-daemon" containerID="cri-o://b5e0b24c2b047322a8bb0e293689c3f0d2f12b10fd38f01969f32e9cc47932b2" gracePeriod=600 Dec 13 03:22:52 crc kubenswrapper[5070]: I1213 03:22:52.061988 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8mvkgm" event={"ID":"4461d5cb-115e-4a85-92e2-066539781b0c","Type":"ContainerStarted","Data":"8d564ad29d2183820d9b2007cfe02ad0ae69eef3084495b2b92d8d4e186a98dc"} Dec 13 03:22:52 crc kubenswrapper[5070]: I1213 03:22:52.062030 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8mvkgm" event={"ID":"4461d5cb-115e-4a85-92e2-066539781b0c","Type":"ContainerStarted","Data":"20509c62f18a622ee5dbac047358898059b63af3007bcccd5aa55ebafe4d5145"} Dec 13 03:22:52 crc kubenswrapper[5070]: I1213 03:22:52.063454 5070 generic.go:334] "Generic (PLEG): container finished" podID="a2e447c7-5901-414f-af96-69441d4750db" 
containerID="b5e0b24c2b047322a8bb0e293689c3f0d2f12b10fd38f01969f32e9cc47932b2" exitCode=0 Dec 13 03:22:52 crc kubenswrapper[5070]: I1213 03:22:52.063492 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" event={"ID":"a2e447c7-5901-414f-af96-69441d4750db","Type":"ContainerDied","Data":"b5e0b24c2b047322a8bb0e293689c3f0d2f12b10fd38f01969f32e9cc47932b2"} Dec 13 03:22:52 crc kubenswrapper[5070]: I1213 03:22:52.063514 5070 scope.go:117] "RemoveContainer" containerID="4e0db15645022c250ea5ee329cdc3679d197f7e9c4f22c7d342a54394aac732f" Dec 13 03:22:53 crc kubenswrapper[5070]: I1213 03:22:53.069245 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" event={"ID":"a2e447c7-5901-414f-af96-69441d4750db","Type":"ContainerStarted","Data":"440f1b13ac82692ab2897557fa86ac3ce3eba37cec807bd53246344ef4b3c0b6"} Dec 13 03:22:53 crc kubenswrapper[5070]: I1213 03:22:53.521671 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-z9z2l"] Dec 13 03:22:53 crc kubenswrapper[5070]: I1213 03:22:53.522935 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-z9z2l" Dec 13 03:22:53 crc kubenswrapper[5070]: I1213 03:22:53.545141 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-z9z2l"] Dec 13 03:22:53 crc kubenswrapper[5070]: I1213 03:22:53.587044 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/20629986-5936-4dbd-b551-24dbf5efc93e-utilities\") pod \"redhat-operators-z9z2l\" (UID: \"20629986-5936-4dbd-b551-24dbf5efc93e\") " pod="openshift-marketplace/redhat-operators-z9z2l" Dec 13 03:22:53 crc kubenswrapper[5070]: I1213 03:22:53.587088 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/20629986-5936-4dbd-b551-24dbf5efc93e-catalog-content\") pod \"redhat-operators-z9z2l\" (UID: \"20629986-5936-4dbd-b551-24dbf5efc93e\") " pod="openshift-marketplace/redhat-operators-z9z2l" Dec 13 03:22:53 crc kubenswrapper[5070]: I1213 03:22:53.587122 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8f9sj\" (UniqueName: \"kubernetes.io/projected/20629986-5936-4dbd-b551-24dbf5efc93e-kube-api-access-8f9sj\") pod \"redhat-operators-z9z2l\" (UID: \"20629986-5936-4dbd-b551-24dbf5efc93e\") " pod="openshift-marketplace/redhat-operators-z9z2l" Dec 13 03:22:53 crc kubenswrapper[5070]: I1213 03:22:53.687768 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/20629986-5936-4dbd-b551-24dbf5efc93e-utilities\") pod \"redhat-operators-z9z2l\" (UID: \"20629986-5936-4dbd-b551-24dbf5efc93e\") " pod="openshift-marketplace/redhat-operators-z9z2l" Dec 13 03:22:53 crc kubenswrapper[5070]: I1213 03:22:53.687840 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/20629986-5936-4dbd-b551-24dbf5efc93e-catalog-content\") pod \"redhat-operators-z9z2l\" (UID: \"20629986-5936-4dbd-b551-24dbf5efc93e\") " pod="openshift-marketplace/redhat-operators-z9z2l" Dec 13 03:22:53 crc kubenswrapper[5070]: I1213 03:22:53.687893 5070 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8f9sj\" (UniqueName: \"kubernetes.io/projected/20629986-5936-4dbd-b551-24dbf5efc93e-kube-api-access-8f9sj\") pod \"redhat-operators-z9z2l\" (UID: \"20629986-5936-4dbd-b551-24dbf5efc93e\") " pod="openshift-marketplace/redhat-operators-z9z2l" Dec 13 03:22:53 crc kubenswrapper[5070]: I1213 03:22:53.688176 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/20629986-5936-4dbd-b551-24dbf5efc93e-utilities\") pod \"redhat-operators-z9z2l\" (UID: \"20629986-5936-4dbd-b551-24dbf5efc93e\") " pod="openshift-marketplace/redhat-operators-z9z2l" Dec 13 03:22:53 crc kubenswrapper[5070]: I1213 03:22:53.688192 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/20629986-5936-4dbd-b551-24dbf5efc93e-catalog-content\") pod \"redhat-operators-z9z2l\" (UID: \"20629986-5936-4dbd-b551-24dbf5efc93e\") " pod="openshift-marketplace/redhat-operators-z9z2l" Dec 13 03:22:53 crc kubenswrapper[5070]: I1213 03:22:53.712699 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8f9sj\" (UniqueName: \"kubernetes.io/projected/20629986-5936-4dbd-b551-24dbf5efc93e-kube-api-access-8f9sj\") pod \"redhat-operators-z9z2l\" (UID: \"20629986-5936-4dbd-b551-24dbf5efc93e\") " pod="openshift-marketplace/redhat-operators-z9z2l" Dec 13 03:22:53 crc kubenswrapper[5070]: I1213 03:22:53.884223 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-z9z2l" Dec 13 03:22:54 crc kubenswrapper[5070]: I1213 03:22:54.076139 5070 generic.go:334] "Generic (PLEG): container finished" podID="4461d5cb-115e-4a85-92e2-066539781b0c" containerID="8d564ad29d2183820d9b2007cfe02ad0ae69eef3084495b2b92d8d4e186a98dc" exitCode=0 Dec 13 03:22:54 crc kubenswrapper[5070]: I1213 03:22:54.076902 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8mvkgm" event={"ID":"4461d5cb-115e-4a85-92e2-066539781b0c","Type":"ContainerDied","Data":"8d564ad29d2183820d9b2007cfe02ad0ae69eef3084495b2b92d8d4e186a98dc"} Dec 13 03:22:54 crc kubenswrapper[5070]: I1213 03:22:54.270535 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-z9z2l"] Dec 13 03:22:55 crc kubenswrapper[5070]: I1213 03:22:55.082870 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-z9z2l" event={"ID":"20629986-5936-4dbd-b551-24dbf5efc93e","Type":"ContainerStarted","Data":"0bab7d8dd7a5ba6bb483c8a882c934dadeb3967864c59d72027f70302f6331c4"} Dec 13 03:22:57 crc kubenswrapper[5070]: I1213 03:22:57.100177 5070 generic.go:334] "Generic (PLEG): container finished" podID="20629986-5936-4dbd-b551-24dbf5efc93e" containerID="fa1aa7c20a52cbdf94cd7c495bbc601a91380d8c4e2a8ae8ea679e9d6a4b8406" exitCode=0 Dec 13 03:22:57 crc kubenswrapper[5070]: I1213 03:22:57.100326 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-z9z2l" event={"ID":"20629986-5936-4dbd-b551-24dbf5efc93e","Type":"ContainerDied","Data":"fa1aa7c20a52cbdf94cd7c495bbc601a91380d8c4e2a8ae8ea679e9d6a4b8406"} Dec 13 03:22:58 crc kubenswrapper[5070]: I1213 03:22:58.108505 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8mvkgm" event={"ID":"4461d5cb-115e-4a85-92e2-066539781b0c","Type":"ContainerStarted","Data":"67ce48b3f6fa4542c79eb1dafe81b4ae84ed536adee8b3e7596b460f750ba48a"} Dec 13 03:22:59 crc kubenswrapper[5070]: I1213 03:22:59.125470 5070 generic.go:334] "Generic (PLEG): container finished" podID="4461d5cb-115e-4a85-92e2-066539781b0c" containerID="67ce48b3f6fa4542c79eb1dafe81b4ae84ed536adee8b3e7596b460f750ba48a" exitCode=0 Dec 13 03:22:59 crc kubenswrapper[5070]: I1213 03:22:59.125606 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8mvkgm" event={"ID":"4461d5cb-115e-4a85-92e2-066539781b0c","Type":"ContainerDied","Data":"67ce48b3f6fa4542c79eb1dafe81b4ae84ed536adee8b3e7596b460f750ba48a"} Dec 13 03:22:59 crc kubenswrapper[5070]: I1213 03:22:59.134350 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-z9z2l" event={"ID":"20629986-5936-4dbd-b551-24dbf5efc93e","Type":"ContainerStarted","Data":"efc5bd68e4c4eb90c2a40bf34dc49719bf3c21c05057e578c7cee891b3ed4003"} Dec 13 03:23:00 crc kubenswrapper[5070]: I1213 03:23:00.147872 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8mvkgm" event={"ID":"4461d5cb-115e-4a85-92e2-066539781b0c","Type":"ContainerStarted","Data":"fe681aeffb8692f25236a0ad2bedb21d5e599c3365dbea93041102a951c4ec38"} Dec 13 03:23:01 crc kubenswrapper[5070]: I1213 03:23:01.156091 5070 generic.go:334] "Generic (PLEG): container finished" podID="20629986-5936-4dbd-b551-24dbf5efc93e" containerID="efc5bd68e4c4eb90c2a40bf34dc49719bf3c21c05057e578c7cee891b3ed4003" exitCode=0 Dec 13 03:23:01 crc kubenswrapper[5070]: I1213 03:23:01.156166 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-z9z2l" event={"ID":"20629986-5936-4dbd-b551-24dbf5efc93e","Type":"ContainerDied","Data":"efc5bd68e4c4eb90c2a40bf34dc49719bf3c21c05057e578c7cee891b3ed4003"} Dec 13 03:23:01 crc kubenswrapper[5070]: I1213 03:23:01.160985 5070 generic.go:334] "Generic (PLEG): container finished" podID="4461d5cb-115e-4a85-92e2-066539781b0c" containerID="fe681aeffb8692f25236a0ad2bedb21d5e599c3365dbea93041102a951c4ec38" exitCode=0 Dec 13 03:23:01 crc kubenswrapper[5070]: I1213 03:23:01.161021 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8mvkgm" event={"ID":"4461d5cb-115e-4a85-92e2-066539781b0c","Type":"ContainerDied","Data":"fe681aeffb8692f25236a0ad2bedb21d5e599c3365dbea93041102a951c4ec38"} Dec 13 03:23:02 crc kubenswrapper[5070]: I1213 03:23:02.500222 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8mvkgm" Dec 13 03:23:02 crc kubenswrapper[5070]: I1213 03:23:02.614204 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lcmjm\" (UniqueName: \"kubernetes.io/projected/4461d5cb-115e-4a85-92e2-066539781b0c-kube-api-access-lcmjm\") pod \"4461d5cb-115e-4a85-92e2-066539781b0c\" (UID: \"4461d5cb-115e-4a85-92e2-066539781b0c\") " Dec 13 03:23:02 crc kubenswrapper[5070]: I1213 03:23:02.614632 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/4461d5cb-115e-4a85-92e2-066539781b0c-util\") pod \"4461d5cb-115e-4a85-92e2-066539781b0c\" (UID: \"4461d5cb-115e-4a85-92e2-066539781b0c\") " Dec 13 03:23:02 crc kubenswrapper[5070]: I1213 03:23:02.614742 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/4461d5cb-115e-4a85-92e2-066539781b0c-bundle\") pod \"4461d5cb-115e-4a85-92e2-066539781b0c\" (UID: \"4461d5cb-115e-4a85-92e2-066539781b0c\") " Dec 13 03:23:02 crc kubenswrapper[5070]: I1213 03:23:02.615549 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4461d5cb-115e-4a85-92e2-066539781b0c-bundle" (OuterVolumeSpecName: "bundle") pod "4461d5cb-115e-4a85-92e2-066539781b0c" (UID: "4461d5cb-115e-4a85-92e2-066539781b0c"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 03:23:02 crc kubenswrapper[5070]: I1213 03:23:02.624788 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4461d5cb-115e-4a85-92e2-066539781b0c-util" (OuterVolumeSpecName: "util") pod "4461d5cb-115e-4a85-92e2-066539781b0c" (UID: "4461d5cb-115e-4a85-92e2-066539781b0c"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 03:23:02 crc kubenswrapper[5070]: I1213 03:23:02.626967 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4461d5cb-115e-4a85-92e2-066539781b0c-kube-api-access-lcmjm" (OuterVolumeSpecName: "kube-api-access-lcmjm") pod "4461d5cb-115e-4a85-92e2-066539781b0c" (UID: "4461d5cb-115e-4a85-92e2-066539781b0c"). InnerVolumeSpecName "kube-api-access-lcmjm". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:23:02 crc kubenswrapper[5070]: I1213 03:23:02.716972 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lcmjm\" (UniqueName: \"kubernetes.io/projected/4461d5cb-115e-4a85-92e2-066539781b0c-kube-api-access-lcmjm\") on node \"crc\" DevicePath \"\"" Dec 13 03:23:02 crc kubenswrapper[5070]: I1213 03:23:02.717039 5070 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/4461d5cb-115e-4a85-92e2-066539781b0c-util\") on node \"crc\" DevicePath \"\"" Dec 13 03:23:02 crc kubenswrapper[5070]: I1213 03:23:02.717062 5070 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/4461d5cb-115e-4a85-92e2-066539781b0c-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 03:23:03 crc kubenswrapper[5070]: I1213 03:23:03.175892 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-z9z2l" event={"ID":"20629986-5936-4dbd-b551-24dbf5efc93e","Type":"ContainerStarted","Data":"82b65dafe74a30143479f9c89214c27e250f187063649f63af9ba859b7570fc7"} Dec 13 03:23:03 crc kubenswrapper[5070]: I1213 03:23:03.177739 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8mvkgm" event={"ID":"4461d5cb-115e-4a85-92e2-066539781b0c","Type":"ContainerDied","Data":"20509c62f18a622ee5dbac047358898059b63af3007bcccd5aa55ebafe4d5145"} Dec 13 03:23:03 crc kubenswrapper[5070]: I1213 03:23:03.177794 5070 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="20509c62f18a622ee5dbac047358898059b63af3007bcccd5aa55ebafe4d5145" Dec 13 03:23:03 crc kubenswrapper[5070]: I1213 03:23:03.177847 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8mvkgm" Dec 13 03:23:03 crc kubenswrapper[5070]: I1213 03:23:03.199529 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-z9z2l" podStartSLOduration=5.401400861 podStartE2EDuration="10.199504852s" podCreationTimestamp="2025-12-13 03:22:53 +0000 UTC" firstStartedPulling="2025-12-13 03:22:57.192803817 +0000 UTC m=+669.428647373" lastFinishedPulling="2025-12-13 03:23:01.990907818 +0000 UTC m=+674.226751364" observedRunningTime="2025-12-13 03:23:03.196543591 +0000 UTC m=+675.432387167" watchObservedRunningTime="2025-12-13 03:23:03.199504852 +0000 UTC m=+675.435348398" Dec 13 03:23:03 crc kubenswrapper[5070]: I1213 03:23:03.884671 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-z9z2l" Dec 13 03:23:03 crc kubenswrapper[5070]: I1213 03:23:03.884735 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-z9z2l" Dec 13 03:23:04 crc kubenswrapper[5070]: I1213 03:23:04.925354 5070 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-z9z2l" podUID="20629986-5936-4dbd-b551-24dbf5efc93e" containerName="registry-server" probeResult="failure" output=< Dec 13 03:23:04 crc kubenswrapper[5070]: timeout: failed to connect service ":50051" within 1s Dec 13 03:23:04 crc kubenswrapper[5070]: > Dec 13 03:23:07 crc kubenswrapper[5070]: I1213 03:23:07.897400 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-operator-6769fb99d-r8zml"] Dec 13 03:23:07 crc kubenswrapper[5070]: E1213 03:23:07.898916 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4461d5cb-115e-4a85-92e2-066539781b0c" containerName="util" Dec 13 03:23:07 crc kubenswrapper[5070]: I1213 03:23:07.898954 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="4461d5cb-115e-4a85-92e2-066539781b0c" containerName="util" Dec 13 03:23:07 crc kubenswrapper[5070]: E1213 03:23:07.898973 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4461d5cb-115e-4a85-92e2-066539781b0c" containerName="pull" Dec 13 03:23:07 crc kubenswrapper[5070]: I1213 03:23:07.898984 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="4461d5cb-115e-4a85-92e2-066539781b0c" containerName="pull" Dec 13 03:23:07 crc kubenswrapper[5070]: E1213 03:23:07.899003 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4461d5cb-115e-4a85-92e2-066539781b0c" containerName="extract" Dec 13 03:23:07 crc kubenswrapper[5070]: I1213 03:23:07.899013 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="4461d5cb-115e-4a85-92e2-066539781b0c" containerName="extract" Dec 13 03:23:07 crc kubenswrapper[5070]: I1213 03:23:07.899158 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="4461d5cb-115e-4a85-92e2-066539781b0c" containerName="extract" Dec 13 03:23:07 crc kubenswrapper[5070]: I1213 03:23:07.899735 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-6769fb99d-r8zml" Dec 13 03:23:07 crc kubenswrapper[5070]: I1213 03:23:07.901770 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"kube-root-ca.crt" Dec 13 03:23:07 crc kubenswrapper[5070]: I1213 03:23:07.902035 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"openshift-service-ca.crt" Dec 13 03:23:07 crc kubenswrapper[5070]: I1213 03:23:07.902338 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-operator-dockercfg-tqh9w" Dec 13 03:23:07 crc kubenswrapper[5070]: I1213 03:23:07.908925 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-6769fb99d-r8zml"] Dec 13 03:23:07 crc kubenswrapper[5070]: I1213 03:23:07.989398 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-djl2k\" (UniqueName: \"kubernetes.io/projected/05a2a978-4fb0-43cb-8402-89e02e584eed-kube-api-access-djl2k\") pod \"nmstate-operator-6769fb99d-r8zml\" (UID: \"05a2a978-4fb0-43cb-8402-89e02e584eed\") " pod="openshift-nmstate/nmstate-operator-6769fb99d-r8zml" Dec 13 03:23:08 crc kubenswrapper[5070]: I1213 03:23:08.090962 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-djl2k\" (UniqueName: \"kubernetes.io/projected/05a2a978-4fb0-43cb-8402-89e02e584eed-kube-api-access-djl2k\") pod \"nmstate-operator-6769fb99d-r8zml\" (UID: \"05a2a978-4fb0-43cb-8402-89e02e584eed\") " pod="openshift-nmstate/nmstate-operator-6769fb99d-r8zml" Dec 13 03:23:08 crc kubenswrapper[5070]: I1213 03:23:08.108737 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-djl2k\" (UniqueName: \"kubernetes.io/projected/05a2a978-4fb0-43cb-8402-89e02e584eed-kube-api-access-djl2k\") pod \"nmstate-operator-6769fb99d-r8zml\" (UID: \"05a2a978-4fb0-43cb-8402-89e02e584eed\") " pod="openshift-nmstate/nmstate-operator-6769fb99d-r8zml" Dec 13 03:23:08 crc kubenswrapper[5070]: I1213 03:23:08.214806 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-6769fb99d-r8zml" Dec 13 03:23:08 crc kubenswrapper[5070]: I1213 03:23:08.651696 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-6769fb99d-r8zml"] Dec 13 03:23:08 crc kubenswrapper[5070]: W1213 03:23:08.660342 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod05a2a978_4fb0_43cb_8402_89e02e584eed.slice/crio-1f05fea114bc0392077828e5f1d0a00d57f8ee030ba20c15c60310edd68de095 WatchSource:0}: Error finding container 1f05fea114bc0392077828e5f1d0a00d57f8ee030ba20c15c60310edd68de095: Status 404 returned error can't find the container with id 1f05fea114bc0392077828e5f1d0a00d57f8ee030ba20c15c60310edd68de095 Dec 13 03:23:09 crc kubenswrapper[5070]: I1213 03:23:09.207037 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-6769fb99d-r8zml" event={"ID":"05a2a978-4fb0-43cb-8402-89e02e584eed","Type":"ContainerStarted","Data":"1f05fea114bc0392077828e5f1d0a00d57f8ee030ba20c15c60310edd68de095"} Dec 13 03:23:13 crc kubenswrapper[5070]: I1213 03:23:13.232861 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-6769fb99d-r8zml" event={"ID":"05a2a978-4fb0-43cb-8402-89e02e584eed","Type":"ContainerStarted","Data":"44201d478067f55dddc4b7829c105b6095de7288fd1c3d594c46c8a20d1b25b0"} Dec 13 03:23:13 crc kubenswrapper[5070]: I1213 03:23:13.259257 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-operator-6769fb99d-r8zml" podStartSLOduration=1.969492626 podStartE2EDuration="6.259220041s" podCreationTimestamp="2025-12-13 03:23:07 +0000 UTC" firstStartedPulling="2025-12-13 03:23:08.66239127 +0000 UTC m=+680.898234806" lastFinishedPulling="2025-12-13 03:23:12.952118675 +0000 UTC m=+685.187962221" observedRunningTime="2025-12-13 03:23:13.249926137 +0000 UTC m=+685.485769723" watchObservedRunningTime="2025-12-13 03:23:13.259220041 +0000 UTC m=+685.495063627" Dec 13 03:23:13 crc kubenswrapper[5070]: I1213 03:23:13.958994 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-z9z2l" Dec 13 03:23:14 crc kubenswrapper[5070]: I1213 03:23:14.008986 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-z9z2l" Dec 13 03:23:14 crc kubenswrapper[5070]: I1213 03:23:14.260037 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-metrics-7f7f7578db-j499r"] Dec 13 03:23:14 crc kubenswrapper[5070]: I1213 03:23:14.261108 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-7f7f7578db-j499r" Dec 13 03:23:14 crc kubenswrapper[5070]: I1213 03:23:14.262752 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-handler-dockercfg-l47ql" Dec 13 03:23:14 crc kubenswrapper[5070]: I1213 03:23:14.278604 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-webhook-f8fb84555-s4zck"] Dec 13 03:23:14 crc kubenswrapper[5070]: I1213 03:23:14.279344 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-webhook-f8fb84555-s4zck" Dec 13 03:23:14 crc kubenswrapper[5070]: I1213 03:23:14.283042 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"openshift-nmstate-webhook" Dec 13 03:23:14 crc kubenswrapper[5070]: I1213 03:23:14.305364 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-f8fb84555-s4zck"] Dec 13 03:23:14 crc kubenswrapper[5070]: I1213 03:23:14.322339 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-handler-srhsc"] Dec 13 03:23:14 crc kubenswrapper[5070]: I1213 03:23:14.324269 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-srhsc" Dec 13 03:23:14 crc kubenswrapper[5070]: I1213 03:23:14.329494 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-7f7f7578db-j499r"] Dec 13 03:23:14 crc kubenswrapper[5070]: I1213 03:23:14.366769 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/9cdef900-29f1-4965-a889-4086d7422f44-ovs-socket\") pod \"nmstate-handler-srhsc\" (UID: \"9cdef900-29f1-4965-a889-4086d7422f44\") " pod="openshift-nmstate/nmstate-handler-srhsc" Dec 13 03:23:14 crc kubenswrapper[5070]: I1213 03:23:14.366815 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-82pzv\" (UniqueName: \"kubernetes.io/projected/7579c85c-ab24-4642-9bde-28ab06c7db9c-kube-api-access-82pzv\") pod \"nmstate-metrics-7f7f7578db-j499r\" (UID: \"7579c85c-ab24-4642-9bde-28ab06c7db9c\") " pod="openshift-nmstate/nmstate-metrics-7f7f7578db-j499r" Dec 13 03:23:14 crc kubenswrapper[5070]: I1213 03:23:14.366835 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bls4l\" (UniqueName: \"kubernetes.io/projected/e1e57c1c-b54c-461c-b834-b5c4c9fc3c94-kube-api-access-bls4l\") pod \"nmstate-webhook-f8fb84555-s4zck\" (UID: \"e1e57c1c-b54c-461c-b834-b5c4c9fc3c94\") " pod="openshift-nmstate/nmstate-webhook-f8fb84555-s4zck" Dec 13 03:23:14 crc kubenswrapper[5070]: I1213 03:23:14.369022 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hv262\" (UniqueName: \"kubernetes.io/projected/9cdef900-29f1-4965-a889-4086d7422f44-kube-api-access-hv262\") pod \"nmstate-handler-srhsc\" (UID: \"9cdef900-29f1-4965-a889-4086d7422f44\") " pod="openshift-nmstate/nmstate-handler-srhsc" Dec 13 03:23:14 crc kubenswrapper[5070]: I1213 03:23:14.369075 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/9cdef900-29f1-4965-a889-4086d7422f44-dbus-socket\") pod \"nmstate-handler-srhsc\" (UID: \"9cdef900-29f1-4965-a889-4086d7422f44\") " pod="openshift-nmstate/nmstate-handler-srhsc" Dec 13 03:23:14 crc kubenswrapper[5070]: I1213 03:23:14.369661 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/9cdef900-29f1-4965-a889-4086d7422f44-nmstate-lock\") pod \"nmstate-handler-srhsc\" (UID: \"9cdef900-29f1-4965-a889-4086d7422f44\") " pod="openshift-nmstate/nmstate-handler-srhsc" Dec 13 03:23:14 crc kubenswrapper[5070]: I1213 03:23:14.369740 5070 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/e1e57c1c-b54c-461c-b834-b5c4c9fc3c94-tls-key-pair\") pod \"nmstate-webhook-f8fb84555-s4zck\" (UID: \"e1e57c1c-b54c-461c-b834-b5c4c9fc3c94\") " pod="openshift-nmstate/nmstate-webhook-f8fb84555-s4zck" Dec 13 03:23:14 crc kubenswrapper[5070]: I1213 03:23:14.401997 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-console-plugin-6ff7998486-8xq5d"] Dec 13 03:23:14 crc kubenswrapper[5070]: I1213 03:23:14.412063 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-6ff7998486-8xq5d"] Dec 13 03:23:14 crc kubenswrapper[5070]: I1213 03:23:14.412380 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-6ff7998486-8xq5d" Dec 13 03:23:14 crc kubenswrapper[5070]: I1213 03:23:14.415191 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"default-dockercfg-mlcks" Dec 13 03:23:14 crc kubenswrapper[5070]: I1213 03:23:14.415468 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"nginx-conf" Dec 13 03:23:14 crc kubenswrapper[5070]: I1213 03:23:14.416225 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"plugin-serving-cert" Dec 13 03:23:14 crc kubenswrapper[5070]: I1213 03:23:14.471379 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/ff50d9d0-6e48-4570-89e4-f4a1eb0e8012-plugin-serving-cert\") pod \"nmstate-console-plugin-6ff7998486-8xq5d\" (UID: \"ff50d9d0-6e48-4570-89e4-f4a1eb0e8012\") " pod="openshift-nmstate/nmstate-console-plugin-6ff7998486-8xq5d" Dec 13 03:23:14 crc kubenswrapper[5070]: I1213 03:23:14.471466 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/9cdef900-29f1-4965-a889-4086d7422f44-ovs-socket\") pod \"nmstate-handler-srhsc\" (UID: \"9cdef900-29f1-4965-a889-4086d7422f44\") " pod="openshift-nmstate/nmstate-handler-srhsc" Dec 13 03:23:14 crc kubenswrapper[5070]: I1213 03:23:14.471494 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-82pzv\" (UniqueName: \"kubernetes.io/projected/7579c85c-ab24-4642-9bde-28ab06c7db9c-kube-api-access-82pzv\") pod \"nmstate-metrics-7f7f7578db-j499r\" (UID: \"7579c85c-ab24-4642-9bde-28ab06c7db9c\") " pod="openshift-nmstate/nmstate-metrics-7f7f7578db-j499r" Dec 13 03:23:14 crc kubenswrapper[5070]: I1213 03:23:14.471515 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bls4l\" (UniqueName: \"kubernetes.io/projected/e1e57c1c-b54c-461c-b834-b5c4c9fc3c94-kube-api-access-bls4l\") pod \"nmstate-webhook-f8fb84555-s4zck\" (UID: \"e1e57c1c-b54c-461c-b834-b5c4c9fc3c94\") " pod="openshift-nmstate/nmstate-webhook-f8fb84555-s4zck" Dec 13 03:23:14 crc kubenswrapper[5070]: I1213 03:23:14.471542 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/ff50d9d0-6e48-4570-89e4-f4a1eb0e8012-nginx-conf\") pod \"nmstate-console-plugin-6ff7998486-8xq5d\" (UID: \"ff50d9d0-6e48-4570-89e4-f4a1eb0e8012\") " pod="openshift-nmstate/nmstate-console-plugin-6ff7998486-8xq5d" Dec 13 03:23:14 crc kubenswrapper[5070]: I1213 03:23:14.471572 5070 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hv262\" (UniqueName: \"kubernetes.io/projected/9cdef900-29f1-4965-a889-4086d7422f44-kube-api-access-hv262\") pod \"nmstate-handler-srhsc\" (UID: \"9cdef900-29f1-4965-a889-4086d7422f44\") " pod="openshift-nmstate/nmstate-handler-srhsc" Dec 13 03:23:14 crc kubenswrapper[5070]: I1213 03:23:14.471595 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/9cdef900-29f1-4965-a889-4086d7422f44-dbus-socket\") pod \"nmstate-handler-srhsc\" (UID: \"9cdef900-29f1-4965-a889-4086d7422f44\") " pod="openshift-nmstate/nmstate-handler-srhsc" Dec 13 03:23:14 crc kubenswrapper[5070]: I1213 03:23:14.471616 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/9cdef900-29f1-4965-a889-4086d7422f44-nmstate-lock\") pod \"nmstate-handler-srhsc\" (UID: \"9cdef900-29f1-4965-a889-4086d7422f44\") " pod="openshift-nmstate/nmstate-handler-srhsc" Dec 13 03:23:14 crc kubenswrapper[5070]: I1213 03:23:14.471652 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/e1e57c1c-b54c-461c-b834-b5c4c9fc3c94-tls-key-pair\") pod \"nmstate-webhook-f8fb84555-s4zck\" (UID: \"e1e57c1c-b54c-461c-b834-b5c4c9fc3c94\") " pod="openshift-nmstate/nmstate-webhook-f8fb84555-s4zck" Dec 13 03:23:14 crc kubenswrapper[5070]: I1213 03:23:14.471699 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8cv5s\" (UniqueName: \"kubernetes.io/projected/ff50d9d0-6e48-4570-89e4-f4a1eb0e8012-kube-api-access-8cv5s\") pod \"nmstate-console-plugin-6ff7998486-8xq5d\" (UID: \"ff50d9d0-6e48-4570-89e4-f4a1eb0e8012\") " pod="openshift-nmstate/nmstate-console-plugin-6ff7998486-8xq5d" Dec 13 03:23:14 crc kubenswrapper[5070]: I1213 03:23:14.471794 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/9cdef900-29f1-4965-a889-4086d7422f44-ovs-socket\") pod \"nmstate-handler-srhsc\" (UID: \"9cdef900-29f1-4965-a889-4086d7422f44\") " pod="openshift-nmstate/nmstate-handler-srhsc" Dec 13 03:23:14 crc kubenswrapper[5070]: I1213 03:23:14.472649 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/9cdef900-29f1-4965-a889-4086d7422f44-dbus-socket\") pod \"nmstate-handler-srhsc\" (UID: \"9cdef900-29f1-4965-a889-4086d7422f44\") " pod="openshift-nmstate/nmstate-handler-srhsc" Dec 13 03:23:14 crc kubenswrapper[5070]: I1213 03:23:14.472688 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/9cdef900-29f1-4965-a889-4086d7422f44-nmstate-lock\") pod \"nmstate-handler-srhsc\" (UID: \"9cdef900-29f1-4965-a889-4086d7422f44\") " pod="openshift-nmstate/nmstate-handler-srhsc" Dec 13 03:23:14 crc kubenswrapper[5070]: E1213 03:23:14.472748 5070 secret.go:188] Couldn't get secret openshift-nmstate/openshift-nmstate-webhook: secret "openshift-nmstate-webhook" not found Dec 13 03:23:14 crc kubenswrapper[5070]: E1213 03:23:14.472792 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e1e57c1c-b54c-461c-b834-b5c4c9fc3c94-tls-key-pair podName:e1e57c1c-b54c-461c-b834-b5c4c9fc3c94 nodeName:}" failed. 
No retries permitted until 2025-12-13 03:23:14.972775491 +0000 UTC m=+687.208619037 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "tls-key-pair" (UniqueName: "kubernetes.io/secret/e1e57c1c-b54c-461c-b834-b5c4c9fc3c94-tls-key-pair") pod "nmstate-webhook-f8fb84555-s4zck" (UID: "e1e57c1c-b54c-461c-b834-b5c4c9fc3c94") : secret "openshift-nmstate-webhook" not found Dec 13 03:23:14 crc kubenswrapper[5070]: I1213 03:23:14.491546 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bls4l\" (UniqueName: \"kubernetes.io/projected/e1e57c1c-b54c-461c-b834-b5c4c9fc3c94-kube-api-access-bls4l\") pod \"nmstate-webhook-f8fb84555-s4zck\" (UID: \"e1e57c1c-b54c-461c-b834-b5c4c9fc3c94\") " pod="openshift-nmstate/nmstate-webhook-f8fb84555-s4zck" Dec 13 03:23:14 crc kubenswrapper[5070]: I1213 03:23:14.492249 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hv262\" (UniqueName: \"kubernetes.io/projected/9cdef900-29f1-4965-a889-4086d7422f44-kube-api-access-hv262\") pod \"nmstate-handler-srhsc\" (UID: \"9cdef900-29f1-4965-a889-4086d7422f44\") " pod="openshift-nmstate/nmstate-handler-srhsc" Dec 13 03:23:14 crc kubenswrapper[5070]: I1213 03:23:14.495604 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-82pzv\" (UniqueName: \"kubernetes.io/projected/7579c85c-ab24-4642-9bde-28ab06c7db9c-kube-api-access-82pzv\") pod \"nmstate-metrics-7f7f7578db-j499r\" (UID: \"7579c85c-ab24-4642-9bde-28ab06c7db9c\") " pod="openshift-nmstate/nmstate-metrics-7f7f7578db-j499r" Dec 13 03:23:14 crc kubenswrapper[5070]: I1213 03:23:14.573136 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/ff50d9d0-6e48-4570-89e4-f4a1eb0e8012-nginx-conf\") pod \"nmstate-console-plugin-6ff7998486-8xq5d\" (UID: \"ff50d9d0-6e48-4570-89e4-f4a1eb0e8012\") " pod="openshift-nmstate/nmstate-console-plugin-6ff7998486-8xq5d" Dec 13 03:23:14 crc kubenswrapper[5070]: I1213 03:23:14.573268 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8cv5s\" (UniqueName: \"kubernetes.io/projected/ff50d9d0-6e48-4570-89e4-f4a1eb0e8012-kube-api-access-8cv5s\") pod \"nmstate-console-plugin-6ff7998486-8xq5d\" (UID: \"ff50d9d0-6e48-4570-89e4-f4a1eb0e8012\") " pod="openshift-nmstate/nmstate-console-plugin-6ff7998486-8xq5d" Dec 13 03:23:14 crc kubenswrapper[5070]: I1213 03:23:14.573300 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/ff50d9d0-6e48-4570-89e4-f4a1eb0e8012-plugin-serving-cert\") pod \"nmstate-console-plugin-6ff7998486-8xq5d\" (UID: \"ff50d9d0-6e48-4570-89e4-f4a1eb0e8012\") " pod="openshift-nmstate/nmstate-console-plugin-6ff7998486-8xq5d" Dec 13 03:23:14 crc kubenswrapper[5070]: E1213 03:23:14.573436 5070 secret.go:188] Couldn't get secret openshift-nmstate/plugin-serving-cert: secret "plugin-serving-cert" not found Dec 13 03:23:14 crc kubenswrapper[5070]: E1213 03:23:14.573525 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ff50d9d0-6e48-4570-89e4-f4a1eb0e8012-plugin-serving-cert podName:ff50d9d0-6e48-4570-89e4-f4a1eb0e8012 nodeName:}" failed. No retries permitted until 2025-12-13 03:23:15.073506068 +0000 UTC m=+687.309349624 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "plugin-serving-cert" (UniqueName: "kubernetes.io/secret/ff50d9d0-6e48-4570-89e4-f4a1eb0e8012-plugin-serving-cert") pod "nmstate-console-plugin-6ff7998486-8xq5d" (UID: "ff50d9d0-6e48-4570-89e4-f4a1eb0e8012") : secret "plugin-serving-cert" not found Dec 13 03:23:14 crc kubenswrapper[5070]: I1213 03:23:14.574510 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/ff50d9d0-6e48-4570-89e4-f4a1eb0e8012-nginx-conf\") pod \"nmstate-console-plugin-6ff7998486-8xq5d\" (UID: \"ff50d9d0-6e48-4570-89e4-f4a1eb0e8012\") " pod="openshift-nmstate/nmstate-console-plugin-6ff7998486-8xq5d" Dec 13 03:23:14 crc kubenswrapper[5070]: I1213 03:23:14.581920 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-7f7f7578db-j499r" Dec 13 03:23:14 crc kubenswrapper[5070]: I1213 03:23:14.595622 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8cv5s\" (UniqueName: \"kubernetes.io/projected/ff50d9d0-6e48-4570-89e4-f4a1eb0e8012-kube-api-access-8cv5s\") pod \"nmstate-console-plugin-6ff7998486-8xq5d\" (UID: \"ff50d9d0-6e48-4570-89e4-f4a1eb0e8012\") " pod="openshift-nmstate/nmstate-console-plugin-6ff7998486-8xq5d" Dec 13 03:23:14 crc kubenswrapper[5070]: I1213 03:23:14.605778 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-847f9ffd8b-fxf72"] Dec 13 03:23:14 crc kubenswrapper[5070]: I1213 03:23:14.606394 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-847f9ffd8b-fxf72" Dec 13 03:23:14 crc kubenswrapper[5070]: I1213 03:23:14.649782 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-handler-srhsc" Dec 13 03:23:14 crc kubenswrapper[5070]: I1213 03:23:14.654900 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-847f9ffd8b-fxf72"] Dec 13 03:23:14 crc kubenswrapper[5070]: I1213 03:23:14.673601 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tf8cr\" (UniqueName: \"kubernetes.io/projected/3fdd94cb-2cd1-4fe6-a580-d6032ae695b0-kube-api-access-tf8cr\") pod \"console-847f9ffd8b-fxf72\" (UID: \"3fdd94cb-2cd1-4fe6-a580-d6032ae695b0\") " pod="openshift-console/console-847f9ffd8b-fxf72" Dec 13 03:23:14 crc kubenswrapper[5070]: I1213 03:23:14.673636 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/3fdd94cb-2cd1-4fe6-a580-d6032ae695b0-console-serving-cert\") pod \"console-847f9ffd8b-fxf72\" (UID: \"3fdd94cb-2cd1-4fe6-a580-d6032ae695b0\") " pod="openshift-console/console-847f9ffd8b-fxf72" Dec 13 03:23:14 crc kubenswrapper[5070]: I1213 03:23:14.673672 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3fdd94cb-2cd1-4fe6-a580-d6032ae695b0-trusted-ca-bundle\") pod \"console-847f9ffd8b-fxf72\" (UID: \"3fdd94cb-2cd1-4fe6-a580-d6032ae695b0\") " pod="openshift-console/console-847f9ffd8b-fxf72" Dec 13 03:23:14 crc kubenswrapper[5070]: I1213 03:23:14.673708 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/3fdd94cb-2cd1-4fe6-a580-d6032ae695b0-console-config\") pod \"console-847f9ffd8b-fxf72\" (UID: \"3fdd94cb-2cd1-4fe6-a580-d6032ae695b0\") " pod="openshift-console/console-847f9ffd8b-fxf72" Dec 13 03:23:14 crc kubenswrapper[5070]: I1213 03:23:14.673729 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/3fdd94cb-2cd1-4fe6-a580-d6032ae695b0-oauth-serving-cert\") pod \"console-847f9ffd8b-fxf72\" (UID: \"3fdd94cb-2cd1-4fe6-a580-d6032ae695b0\") " pod="openshift-console/console-847f9ffd8b-fxf72" Dec 13 03:23:14 crc kubenswrapper[5070]: I1213 03:23:14.673748 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/3fdd94cb-2cd1-4fe6-a580-d6032ae695b0-console-oauth-config\") pod \"console-847f9ffd8b-fxf72\" (UID: \"3fdd94cb-2cd1-4fe6-a580-d6032ae695b0\") " pod="openshift-console/console-847f9ffd8b-fxf72" Dec 13 03:23:14 crc kubenswrapper[5070]: I1213 03:23:14.673906 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/3fdd94cb-2cd1-4fe6-a580-d6032ae695b0-service-ca\") pod \"console-847f9ffd8b-fxf72\" (UID: \"3fdd94cb-2cd1-4fe6-a580-d6032ae695b0\") " pod="openshift-console/console-847f9ffd8b-fxf72" Dec 13 03:23:14 crc kubenswrapper[5070]: I1213 03:23:14.775610 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tf8cr\" (UniqueName: \"kubernetes.io/projected/3fdd94cb-2cd1-4fe6-a580-d6032ae695b0-kube-api-access-tf8cr\") pod \"console-847f9ffd8b-fxf72\" (UID: \"3fdd94cb-2cd1-4fe6-a580-d6032ae695b0\") " pod="openshift-console/console-847f9ffd8b-fxf72" Dec 13 03:23:14 
crc kubenswrapper[5070]: I1213 03:23:14.775935 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/3fdd94cb-2cd1-4fe6-a580-d6032ae695b0-console-serving-cert\") pod \"console-847f9ffd8b-fxf72\" (UID: \"3fdd94cb-2cd1-4fe6-a580-d6032ae695b0\") " pod="openshift-console/console-847f9ffd8b-fxf72" Dec 13 03:23:14 crc kubenswrapper[5070]: I1213 03:23:14.775976 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3fdd94cb-2cd1-4fe6-a580-d6032ae695b0-trusted-ca-bundle\") pod \"console-847f9ffd8b-fxf72\" (UID: \"3fdd94cb-2cd1-4fe6-a580-d6032ae695b0\") " pod="openshift-console/console-847f9ffd8b-fxf72" Dec 13 03:23:14 crc kubenswrapper[5070]: I1213 03:23:14.776006 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/3fdd94cb-2cd1-4fe6-a580-d6032ae695b0-console-config\") pod \"console-847f9ffd8b-fxf72\" (UID: \"3fdd94cb-2cd1-4fe6-a580-d6032ae695b0\") " pod="openshift-console/console-847f9ffd8b-fxf72" Dec 13 03:23:14 crc kubenswrapper[5070]: I1213 03:23:14.776034 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/3fdd94cb-2cd1-4fe6-a580-d6032ae695b0-oauth-serving-cert\") pod \"console-847f9ffd8b-fxf72\" (UID: \"3fdd94cb-2cd1-4fe6-a580-d6032ae695b0\") " pod="openshift-console/console-847f9ffd8b-fxf72" Dec 13 03:23:14 crc kubenswrapper[5070]: I1213 03:23:14.776061 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/3fdd94cb-2cd1-4fe6-a580-d6032ae695b0-console-oauth-config\") pod \"console-847f9ffd8b-fxf72\" (UID: \"3fdd94cb-2cd1-4fe6-a580-d6032ae695b0\") " pod="openshift-console/console-847f9ffd8b-fxf72" Dec 13 03:23:14 crc kubenswrapper[5070]: I1213 03:23:14.776087 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/3fdd94cb-2cd1-4fe6-a580-d6032ae695b0-service-ca\") pod \"console-847f9ffd8b-fxf72\" (UID: \"3fdd94cb-2cd1-4fe6-a580-d6032ae695b0\") " pod="openshift-console/console-847f9ffd8b-fxf72" Dec 13 03:23:14 crc kubenswrapper[5070]: I1213 03:23:14.777241 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/3fdd94cb-2cd1-4fe6-a580-d6032ae695b0-service-ca\") pod \"console-847f9ffd8b-fxf72\" (UID: \"3fdd94cb-2cd1-4fe6-a580-d6032ae695b0\") " pod="openshift-console/console-847f9ffd8b-fxf72" Dec 13 03:23:14 crc kubenswrapper[5070]: I1213 03:23:14.777536 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/3fdd94cb-2cd1-4fe6-a580-d6032ae695b0-oauth-serving-cert\") pod \"console-847f9ffd8b-fxf72\" (UID: \"3fdd94cb-2cd1-4fe6-a580-d6032ae695b0\") " pod="openshift-console/console-847f9ffd8b-fxf72" Dec 13 03:23:14 crc kubenswrapper[5070]: I1213 03:23:14.777555 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/3fdd94cb-2cd1-4fe6-a580-d6032ae695b0-console-config\") pod \"console-847f9ffd8b-fxf72\" (UID: \"3fdd94cb-2cd1-4fe6-a580-d6032ae695b0\") " pod="openshift-console/console-847f9ffd8b-fxf72" Dec 13 03:23:14 crc kubenswrapper[5070]: I1213 03:23:14.778252 5070 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3fdd94cb-2cd1-4fe6-a580-d6032ae695b0-trusted-ca-bundle\") pod \"console-847f9ffd8b-fxf72\" (UID: \"3fdd94cb-2cd1-4fe6-a580-d6032ae695b0\") " pod="openshift-console/console-847f9ffd8b-fxf72" Dec 13 03:23:14 crc kubenswrapper[5070]: I1213 03:23:14.781224 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/3fdd94cb-2cd1-4fe6-a580-d6032ae695b0-console-oauth-config\") pod \"console-847f9ffd8b-fxf72\" (UID: \"3fdd94cb-2cd1-4fe6-a580-d6032ae695b0\") " pod="openshift-console/console-847f9ffd8b-fxf72" Dec 13 03:23:14 crc kubenswrapper[5070]: I1213 03:23:14.781625 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/3fdd94cb-2cd1-4fe6-a580-d6032ae695b0-console-serving-cert\") pod \"console-847f9ffd8b-fxf72\" (UID: \"3fdd94cb-2cd1-4fe6-a580-d6032ae695b0\") " pod="openshift-console/console-847f9ffd8b-fxf72" Dec 13 03:23:14 crc kubenswrapper[5070]: I1213 03:23:14.794884 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tf8cr\" (UniqueName: \"kubernetes.io/projected/3fdd94cb-2cd1-4fe6-a580-d6032ae695b0-kube-api-access-tf8cr\") pod \"console-847f9ffd8b-fxf72\" (UID: \"3fdd94cb-2cd1-4fe6-a580-d6032ae695b0\") " pod="openshift-console/console-847f9ffd8b-fxf72" Dec 13 03:23:14 crc kubenswrapper[5070]: I1213 03:23:14.966225 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-847f9ffd8b-fxf72" Dec 13 03:23:14 crc kubenswrapper[5070]: I1213 03:23:14.978301 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/e1e57c1c-b54c-461c-b834-b5c4c9fc3c94-tls-key-pair\") pod \"nmstate-webhook-f8fb84555-s4zck\" (UID: \"e1e57c1c-b54c-461c-b834-b5c4c9fc3c94\") " pod="openshift-nmstate/nmstate-webhook-f8fb84555-s4zck" Dec 13 03:23:14 crc kubenswrapper[5070]: I1213 03:23:14.983186 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/e1e57c1c-b54c-461c-b834-b5c4c9fc3c94-tls-key-pair\") pod \"nmstate-webhook-f8fb84555-s4zck\" (UID: \"e1e57c1c-b54c-461c-b834-b5c4c9fc3c94\") " pod="openshift-nmstate/nmstate-webhook-f8fb84555-s4zck" Dec 13 03:23:15 crc kubenswrapper[5070]: I1213 03:23:15.079570 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/ff50d9d0-6e48-4570-89e4-f4a1eb0e8012-plugin-serving-cert\") pod \"nmstate-console-plugin-6ff7998486-8xq5d\" (UID: \"ff50d9d0-6e48-4570-89e4-f4a1eb0e8012\") " pod="openshift-nmstate/nmstate-console-plugin-6ff7998486-8xq5d" Dec 13 03:23:15 crc kubenswrapper[5070]: I1213 03:23:15.082877 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/ff50d9d0-6e48-4570-89e4-f4a1eb0e8012-plugin-serving-cert\") pod \"nmstate-console-plugin-6ff7998486-8xq5d\" (UID: \"ff50d9d0-6e48-4570-89e4-f4a1eb0e8012\") " pod="openshift-nmstate/nmstate-console-plugin-6ff7998486-8xq5d" Dec 13 03:23:15 crc kubenswrapper[5070]: I1213 03:23:15.116960 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-7f7f7578db-j499r"] Dec 13 03:23:15 crc kubenswrapper[5070]: W1213 03:23:15.127380 5070 
manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7579c85c_ab24_4642_9bde_28ab06c7db9c.slice/crio-df90f2b57198a1abcf80faed3bacb462dbc9493b8ffef91fa3a3f3d3abab8663 WatchSource:0}: Error finding container df90f2b57198a1abcf80faed3bacb462dbc9493b8ffef91fa3a3f3d3abab8663: Status 404 returned error can't find the container with id df90f2b57198a1abcf80faed3bacb462dbc9493b8ffef91fa3a3f3d3abab8663 Dec 13 03:23:15 crc kubenswrapper[5070]: I1213 03:23:15.197733 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-f8fb84555-s4zck" Dec 13 03:23:15 crc kubenswrapper[5070]: I1213 03:23:15.202111 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-847f9ffd8b-fxf72"] Dec 13 03:23:15 crc kubenswrapper[5070]: W1213 03:23:15.208328 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3fdd94cb_2cd1_4fe6_a580_d6032ae695b0.slice/crio-7e5ac82ffbce011d2b80634722a6e6a3cb9646d583123a3ae5a0fb9ca104971f WatchSource:0}: Error finding container 7e5ac82ffbce011d2b80634722a6e6a3cb9646d583123a3ae5a0fb9ca104971f: Status 404 returned error can't find the container with id 7e5ac82ffbce011d2b80634722a6e6a3cb9646d583123a3ae5a0fb9ca104971f Dec 13 03:23:15 crc kubenswrapper[5070]: I1213 03:23:15.243215 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-847f9ffd8b-fxf72" event={"ID":"3fdd94cb-2cd1-4fe6-a580-d6032ae695b0","Type":"ContainerStarted","Data":"7e5ac82ffbce011d2b80634722a6e6a3cb9646d583123a3ae5a0fb9ca104971f"} Dec 13 03:23:15 crc kubenswrapper[5070]: I1213 03:23:15.244529 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-srhsc" event={"ID":"9cdef900-29f1-4965-a889-4086d7422f44","Type":"ContainerStarted","Data":"48d52695d06186bda03e2aa2b6d475f84e8994d504421081aaddadc25a98b082"} Dec 13 03:23:15 crc kubenswrapper[5070]: I1213 03:23:15.245560 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f7f7578db-j499r" event={"ID":"7579c85c-ab24-4642-9bde-28ab06c7db9c","Type":"ContainerStarted","Data":"df90f2b57198a1abcf80faed3bacb462dbc9493b8ffef91fa3a3f3d3abab8663"} Dec 13 03:23:15 crc kubenswrapper[5070]: I1213 03:23:15.330161 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-6ff7998486-8xq5d" Dec 13 03:23:15 crc kubenswrapper[5070]: I1213 03:23:15.376053 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-f8fb84555-s4zck"] Dec 13 03:23:15 crc kubenswrapper[5070]: W1213 03:23:15.382837 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode1e57c1c_b54c_461c_b834_b5c4c9fc3c94.slice/crio-31515e2cd591215f63bb2581fef3470e5ff16b027510f0ebbb95d1f46a6e60bb WatchSource:0}: Error finding container 31515e2cd591215f63bb2581fef3470e5ff16b027510f0ebbb95d1f46a6e60bb: Status 404 returned error can't find the container with id 31515e2cd591215f63bb2581fef3470e5ff16b027510f0ebbb95d1f46a6e60bb Dec 13 03:23:15 crc kubenswrapper[5070]: I1213 03:23:15.527386 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-6ff7998486-8xq5d"] Dec 13 03:23:16 crc kubenswrapper[5070]: I1213 03:23:16.180369 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-z9z2l"] Dec 13 03:23:16 crc kubenswrapper[5070]: I1213 03:23:16.180869 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-z9z2l" podUID="20629986-5936-4dbd-b551-24dbf5efc93e" containerName="registry-server" containerID="cri-o://82b65dafe74a30143479f9c89214c27e250f187063649f63af9ba859b7570fc7" gracePeriod=2 Dec 13 03:23:16 crc kubenswrapper[5070]: I1213 03:23:16.254015 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-f8fb84555-s4zck" event={"ID":"e1e57c1c-b54c-461c-b834-b5c4c9fc3c94","Type":"ContainerStarted","Data":"31515e2cd591215f63bb2581fef3470e5ff16b027510f0ebbb95d1f46a6e60bb"} Dec 13 03:23:16 crc kubenswrapper[5070]: I1213 03:23:16.255653 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-847f9ffd8b-fxf72" event={"ID":"3fdd94cb-2cd1-4fe6-a580-d6032ae695b0","Type":"ContainerStarted","Data":"185723c007fa4fb414f019f99b2f5693f85db6c1e2497f5b6077f1d41610e5f8"} Dec 13 03:23:16 crc kubenswrapper[5070]: I1213 03:23:16.259337 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-6ff7998486-8xq5d" event={"ID":"ff50d9d0-6e48-4570-89e4-f4a1eb0e8012","Type":"ContainerStarted","Data":"fc9ce5601467283edf2aa0bfa7bc4c27f075c0e1d8e22d1a2f8011eb4cd2e782"} Dec 13 03:23:16 crc kubenswrapper[5070]: I1213 03:23:16.274235 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-847f9ffd8b-fxf72" podStartSLOduration=2.274215811 podStartE2EDuration="2.274215811s" podCreationTimestamp="2025-12-13 03:23:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 03:23:16.271510197 +0000 UTC m=+688.507353763" watchObservedRunningTime="2025-12-13 03:23:16.274215811 +0000 UTC m=+688.510059357" Dec 13 03:23:17 crc kubenswrapper[5070]: I1213 03:23:17.265987 5070 generic.go:334] "Generic (PLEG): container finished" podID="20629986-5936-4dbd-b551-24dbf5efc93e" containerID="82b65dafe74a30143479f9c89214c27e250f187063649f63af9ba859b7570fc7" exitCode=0 Dec 13 03:23:17 crc kubenswrapper[5070]: I1213 03:23:17.266063 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-z9z2l" 
event={"ID":"20629986-5936-4dbd-b551-24dbf5efc93e","Type":"ContainerDied","Data":"82b65dafe74a30143479f9c89214c27e250f187063649f63af9ba859b7570fc7"} Dec 13 03:23:17 crc kubenswrapper[5070]: I1213 03:23:17.927661 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-z9z2l" Dec 13 03:23:18 crc kubenswrapper[5070]: I1213 03:23:18.121932 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/20629986-5936-4dbd-b551-24dbf5efc93e-utilities\") pod \"20629986-5936-4dbd-b551-24dbf5efc93e\" (UID: \"20629986-5936-4dbd-b551-24dbf5efc93e\") " Dec 13 03:23:18 crc kubenswrapper[5070]: I1213 03:23:18.122035 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/20629986-5936-4dbd-b551-24dbf5efc93e-catalog-content\") pod \"20629986-5936-4dbd-b551-24dbf5efc93e\" (UID: \"20629986-5936-4dbd-b551-24dbf5efc93e\") " Dec 13 03:23:18 crc kubenswrapper[5070]: I1213 03:23:18.122268 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8f9sj\" (UniqueName: \"kubernetes.io/projected/20629986-5936-4dbd-b551-24dbf5efc93e-kube-api-access-8f9sj\") pod \"20629986-5936-4dbd-b551-24dbf5efc93e\" (UID: \"20629986-5936-4dbd-b551-24dbf5efc93e\") " Dec 13 03:23:18 crc kubenswrapper[5070]: I1213 03:23:18.123080 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/20629986-5936-4dbd-b551-24dbf5efc93e-utilities" (OuterVolumeSpecName: "utilities") pod "20629986-5936-4dbd-b551-24dbf5efc93e" (UID: "20629986-5936-4dbd-b551-24dbf5efc93e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 03:23:18 crc kubenswrapper[5070]: I1213 03:23:18.123603 5070 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/20629986-5936-4dbd-b551-24dbf5efc93e-utilities\") on node \"crc\" DevicePath \"\"" Dec 13 03:23:18 crc kubenswrapper[5070]: I1213 03:23:18.127601 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20629986-5936-4dbd-b551-24dbf5efc93e-kube-api-access-8f9sj" (OuterVolumeSpecName: "kube-api-access-8f9sj") pod "20629986-5936-4dbd-b551-24dbf5efc93e" (UID: "20629986-5936-4dbd-b551-24dbf5efc93e"). InnerVolumeSpecName "kube-api-access-8f9sj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:23:18 crc kubenswrapper[5070]: I1213 03:23:18.224151 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8f9sj\" (UniqueName: \"kubernetes.io/projected/20629986-5936-4dbd-b551-24dbf5efc93e-kube-api-access-8f9sj\") on node \"crc\" DevicePath \"\"" Dec 13 03:23:18 crc kubenswrapper[5070]: I1213 03:23:18.246411 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/20629986-5936-4dbd-b551-24dbf5efc93e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "20629986-5936-4dbd-b551-24dbf5efc93e" (UID: "20629986-5936-4dbd-b551-24dbf5efc93e"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 03:23:18 crc kubenswrapper[5070]: I1213 03:23:18.274175 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-z9z2l" event={"ID":"20629986-5936-4dbd-b551-24dbf5efc93e","Type":"ContainerDied","Data":"0bab7d8dd7a5ba6bb483c8a882c934dadeb3967864c59d72027f70302f6331c4"} Dec 13 03:23:18 crc kubenswrapper[5070]: I1213 03:23:18.274232 5070 scope.go:117] "RemoveContainer" containerID="82b65dafe74a30143479f9c89214c27e250f187063649f63af9ba859b7570fc7" Dec 13 03:23:18 crc kubenswrapper[5070]: I1213 03:23:18.274234 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-z9z2l" Dec 13 03:23:18 crc kubenswrapper[5070]: I1213 03:23:18.304916 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-z9z2l"] Dec 13 03:23:18 crc kubenswrapper[5070]: I1213 03:23:18.309112 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-z9z2l"] Dec 13 03:23:18 crc kubenswrapper[5070]: I1213 03:23:18.325020 5070 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/20629986-5936-4dbd-b551-24dbf5efc93e-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 13 03:23:19 crc kubenswrapper[5070]: I1213 03:23:19.129158 5070 scope.go:117] "RemoveContainer" containerID="efc5bd68e4c4eb90c2a40bf34dc49719bf3c21c05057e578c7cee891b3ed4003" Dec 13 03:23:19 crc kubenswrapper[5070]: I1213 03:23:19.273765 5070 scope.go:117] "RemoveContainer" containerID="fa1aa7c20a52cbdf94cd7c495bbc601a91380d8c4e2a8ae8ea679e9d6a4b8406" Dec 13 03:23:20 crc kubenswrapper[5070]: I1213 03:23:20.174763 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20629986-5936-4dbd-b551-24dbf5efc93e" path="/var/lib/kubelet/pods/20629986-5936-4dbd-b551-24dbf5efc93e/volumes" Dec 13 03:23:20 crc kubenswrapper[5070]: I1213 03:23:20.287753 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-srhsc" event={"ID":"9cdef900-29f1-4965-a889-4086d7422f44","Type":"ContainerStarted","Data":"e7f0c5948e1285dc008a6ba627254be50e9d2672644fc617cb271e2bab987cb1"} Dec 13 03:23:20 crc kubenswrapper[5070]: I1213 03:23:20.287909 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-handler-srhsc" Dec 13 03:23:20 crc kubenswrapper[5070]: I1213 03:23:20.289433 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-6ff7998486-8xq5d" event={"ID":"ff50d9d0-6e48-4570-89e4-f4a1eb0e8012","Type":"ContainerStarted","Data":"9fc6eb6598877ef23e2e1a471be67857d0113a253c9dfd9b4719df7041256cae"} Dec 13 03:23:20 crc kubenswrapper[5070]: I1213 03:23:20.292375 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-f8fb84555-s4zck" event={"ID":"e1e57c1c-b54c-461c-b834-b5c4c9fc3c94","Type":"ContainerStarted","Data":"714d3567f4d6f65523f570fba08b0c812f32bb90bef6a8518e9a32ebc421311f"} Dec 13 03:23:20 crc kubenswrapper[5070]: I1213 03:23:20.292885 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-webhook-f8fb84555-s4zck" Dec 13 03:23:20 crc kubenswrapper[5070]: I1213 03:23:20.294653 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f7f7578db-j499r" 
event={"ID":"7579c85c-ab24-4642-9bde-28ab06c7db9c","Type":"ContainerStarted","Data":"54fadefd781e90d3cc2e1f1a20e8dd6ab1cd926526d84afc020bed8ab638acfd"} Dec 13 03:23:20 crc kubenswrapper[5070]: I1213 03:23:20.309777 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-handler-srhsc" podStartSLOduration=1.704407225 podStartE2EDuration="6.309754208s" podCreationTimestamp="2025-12-13 03:23:14 +0000 UTC" firstStartedPulling="2025-12-13 03:23:14.676173989 +0000 UTC m=+686.912017535" lastFinishedPulling="2025-12-13 03:23:19.281520952 +0000 UTC m=+691.517364518" observedRunningTime="2025-12-13 03:23:20.304526345 +0000 UTC m=+692.540369901" watchObservedRunningTime="2025-12-13 03:23:20.309754208 +0000 UTC m=+692.545597774" Dec 13 03:23:20 crc kubenswrapper[5070]: I1213 03:23:20.320644 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-webhook-f8fb84555-s4zck" podStartSLOduration=2.404079458 podStartE2EDuration="6.320622746s" podCreationTimestamp="2025-12-13 03:23:14 +0000 UTC" firstStartedPulling="2025-12-13 03:23:15.385064543 +0000 UTC m=+687.620908079" lastFinishedPulling="2025-12-13 03:23:19.301607821 +0000 UTC m=+691.537451367" observedRunningTime="2025-12-13 03:23:20.317643504 +0000 UTC m=+692.553487080" watchObservedRunningTime="2025-12-13 03:23:20.320622746 +0000 UTC m=+692.556466312" Dec 13 03:23:20 crc kubenswrapper[5070]: I1213 03:23:20.338170 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-console-plugin-6ff7998486-8xq5d" podStartSLOduration=2.5948014969999997 podStartE2EDuration="6.338153305s" podCreationTimestamp="2025-12-13 03:23:14 +0000 UTC" firstStartedPulling="2025-12-13 03:23:15.539392527 +0000 UTC m=+687.775236063" lastFinishedPulling="2025-12-13 03:23:19.282744325 +0000 UTC m=+691.518587871" observedRunningTime="2025-12-13 03:23:20.330415653 +0000 UTC m=+692.566259209" watchObservedRunningTime="2025-12-13 03:23:20.338153305 +0000 UTC m=+692.573996851" Dec 13 03:23:22 crc kubenswrapper[5070]: I1213 03:23:22.308926 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f7f7578db-j499r" event={"ID":"7579c85c-ab24-4642-9bde-28ab06c7db9c","Type":"ContainerStarted","Data":"b99fe0b465717fd172a990b6fc6540e94b7bd2cc069a7856f39318cd50180048"} Dec 13 03:23:22 crc kubenswrapper[5070]: I1213 03:23:22.339347 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-metrics-7f7f7578db-j499r" podStartSLOduration=1.6864373019999999 podStartE2EDuration="8.339319284s" podCreationTimestamp="2025-12-13 03:23:14 +0000 UTC" firstStartedPulling="2025-12-13 03:23:15.129619691 +0000 UTC m=+687.365463237" lastFinishedPulling="2025-12-13 03:23:21.782501673 +0000 UTC m=+694.018345219" observedRunningTime="2025-12-13 03:23:22.337960177 +0000 UTC m=+694.573803723" watchObservedRunningTime="2025-12-13 03:23:22.339319284 +0000 UTC m=+694.575162870" Dec 13 03:23:24 crc kubenswrapper[5070]: I1213 03:23:24.690734 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-handler-srhsc" Dec 13 03:23:24 crc kubenswrapper[5070]: I1213 03:23:24.966953 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-847f9ffd8b-fxf72" Dec 13 03:23:24 crc kubenswrapper[5070]: I1213 03:23:24.967045 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-console/console-847f9ffd8b-fxf72" Dec 13 03:23:24 crc kubenswrapper[5070]: I1213 03:23:24.973854 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-847f9ffd8b-fxf72" Dec 13 03:23:25 crc kubenswrapper[5070]: I1213 03:23:25.332138 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-847f9ffd8b-fxf72" Dec 13 03:23:25 crc kubenswrapper[5070]: I1213 03:23:25.394256 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-z6jvd"] Dec 13 03:23:35 crc kubenswrapper[5070]: I1213 03:23:35.203225 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-webhook-f8fb84555-s4zck" Dec 13 03:23:49 crc kubenswrapper[5070]: I1213 03:23:49.808329 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4kcf9n"] Dec 13 03:23:49 crc kubenswrapper[5070]: E1213 03:23:49.809098 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="20629986-5936-4dbd-b551-24dbf5efc93e" containerName="extract-content" Dec 13 03:23:49 crc kubenswrapper[5070]: I1213 03:23:49.809113 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="20629986-5936-4dbd-b551-24dbf5efc93e" containerName="extract-content" Dec 13 03:23:49 crc kubenswrapper[5070]: E1213 03:23:49.809126 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="20629986-5936-4dbd-b551-24dbf5efc93e" containerName="extract-utilities" Dec 13 03:23:49 crc kubenswrapper[5070]: I1213 03:23:49.809134 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="20629986-5936-4dbd-b551-24dbf5efc93e" containerName="extract-utilities" Dec 13 03:23:49 crc kubenswrapper[5070]: E1213 03:23:49.809150 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="20629986-5936-4dbd-b551-24dbf5efc93e" containerName="registry-server" Dec 13 03:23:49 crc kubenswrapper[5070]: I1213 03:23:49.809157 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="20629986-5936-4dbd-b551-24dbf5efc93e" containerName="registry-server" Dec 13 03:23:49 crc kubenswrapper[5070]: I1213 03:23:49.809284 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="20629986-5936-4dbd-b551-24dbf5efc93e" containerName="registry-server" Dec 13 03:23:49 crc kubenswrapper[5070]: I1213 03:23:49.810104 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4kcf9n" Dec 13 03:23:49 crc kubenswrapper[5070]: I1213 03:23:49.813018 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 13 03:23:49 crc kubenswrapper[5070]: I1213 03:23:49.821609 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4kcf9n"] Dec 13 03:23:49 crc kubenswrapper[5070]: I1213 03:23:49.916835 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/b7800c1a-3a8e-4647-91be-97515b5c094c-bundle\") pod \"5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4kcf9n\" (UID: \"b7800c1a-3a8e-4647-91be-97515b5c094c\") " pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4kcf9n" Dec 13 03:23:49 crc kubenswrapper[5070]: I1213 03:23:49.917169 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/b7800c1a-3a8e-4647-91be-97515b5c094c-util\") pod \"5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4kcf9n\" (UID: \"b7800c1a-3a8e-4647-91be-97515b5c094c\") " pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4kcf9n" Dec 13 03:23:49 crc kubenswrapper[5070]: I1213 03:23:49.917293 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5bfl6\" (UniqueName: \"kubernetes.io/projected/b7800c1a-3a8e-4647-91be-97515b5c094c-kube-api-access-5bfl6\") pod \"5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4kcf9n\" (UID: \"b7800c1a-3a8e-4647-91be-97515b5c094c\") " pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4kcf9n" Dec 13 03:23:50 crc kubenswrapper[5070]: I1213 03:23:50.018652 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/b7800c1a-3a8e-4647-91be-97515b5c094c-util\") pod \"5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4kcf9n\" (UID: \"b7800c1a-3a8e-4647-91be-97515b5c094c\") " pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4kcf9n" Dec 13 03:23:50 crc kubenswrapper[5070]: I1213 03:23:50.018732 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5bfl6\" (UniqueName: \"kubernetes.io/projected/b7800c1a-3a8e-4647-91be-97515b5c094c-kube-api-access-5bfl6\") pod \"5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4kcf9n\" (UID: \"b7800c1a-3a8e-4647-91be-97515b5c094c\") " pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4kcf9n" Dec 13 03:23:50 crc kubenswrapper[5070]: I1213 03:23:50.018827 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/b7800c1a-3a8e-4647-91be-97515b5c094c-bundle\") pod \"5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4kcf9n\" (UID: \"b7800c1a-3a8e-4647-91be-97515b5c094c\") " pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4kcf9n" Dec 13 03:23:50 crc kubenswrapper[5070]: I1213 03:23:50.019332 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: 
\"kubernetes.io/empty-dir/b7800c1a-3a8e-4647-91be-97515b5c094c-bundle\") pod \"5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4kcf9n\" (UID: \"b7800c1a-3a8e-4647-91be-97515b5c094c\") " pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4kcf9n" Dec 13 03:23:50 crc kubenswrapper[5070]: I1213 03:23:50.019581 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/b7800c1a-3a8e-4647-91be-97515b5c094c-util\") pod \"5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4kcf9n\" (UID: \"b7800c1a-3a8e-4647-91be-97515b5c094c\") " pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4kcf9n" Dec 13 03:23:50 crc kubenswrapper[5070]: I1213 03:23:50.037164 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5bfl6\" (UniqueName: \"kubernetes.io/projected/b7800c1a-3a8e-4647-91be-97515b5c094c-kube-api-access-5bfl6\") pod \"5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4kcf9n\" (UID: \"b7800c1a-3a8e-4647-91be-97515b5c094c\") " pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4kcf9n" Dec 13 03:23:50 crc kubenswrapper[5070]: I1213 03:23:50.130996 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4kcf9n" Dec 13 03:23:50 crc kubenswrapper[5070]: I1213 03:23:50.343524 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4kcf9n"] Dec 13 03:23:50 crc kubenswrapper[5070]: I1213 03:23:50.488194 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-f9d7485db-z6jvd" podUID="7dd4f70a-3e48-4dcd-8e65-a9ee2430dfc4" containerName="console" containerID="cri-o://a003fda99e7b1776078b1e821d83e616b36f25462327fb6e4aa2ddf50ac231b7" gracePeriod=15 Dec 13 03:23:50 crc kubenswrapper[5070]: I1213 03:23:50.488860 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4kcf9n" event={"ID":"b7800c1a-3a8e-4647-91be-97515b5c094c","Type":"ContainerStarted","Data":"1a168fdc62afed76f6255b38274ed1e7e2f3abdd1c2f8a4170512786b2d387db"} Dec 13 03:23:50 crc kubenswrapper[5070]: I1213 03:23:50.488927 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4kcf9n" event={"ID":"b7800c1a-3a8e-4647-91be-97515b5c094c","Type":"ContainerStarted","Data":"e89c55cfc99a0ba582fd1cbc187a9df4b1df319b9a1efa9cf02d873c5530eadd"} Dec 13 03:23:50 crc kubenswrapper[5070]: I1213 03:23:50.785190 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-z6jvd_7dd4f70a-3e48-4dcd-8e65-a9ee2430dfc4/console/0.log" Dec 13 03:23:50 crc kubenswrapper[5070]: I1213 03:23:50.785282 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-z6jvd" Dec 13 03:23:50 crc kubenswrapper[5070]: I1213 03:23:50.931267 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7dd4f70a-3e48-4dcd-8e65-a9ee2430dfc4-trusted-ca-bundle\") pod \"7dd4f70a-3e48-4dcd-8e65-a9ee2430dfc4\" (UID: \"7dd4f70a-3e48-4dcd-8e65-a9ee2430dfc4\") " Dec 13 03:23:50 crc kubenswrapper[5070]: I1213 03:23:50.931732 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/7dd4f70a-3e48-4dcd-8e65-a9ee2430dfc4-console-serving-cert\") pod \"7dd4f70a-3e48-4dcd-8e65-a9ee2430dfc4\" (UID: \"7dd4f70a-3e48-4dcd-8e65-a9ee2430dfc4\") " Dec 13 03:23:50 crc kubenswrapper[5070]: I1213 03:23:50.931765 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/7dd4f70a-3e48-4dcd-8e65-a9ee2430dfc4-oauth-serving-cert\") pod \"7dd4f70a-3e48-4dcd-8e65-a9ee2430dfc4\" (UID: \"7dd4f70a-3e48-4dcd-8e65-a9ee2430dfc4\") " Dec 13 03:23:50 crc kubenswrapper[5070]: I1213 03:23:50.931801 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/7dd4f70a-3e48-4dcd-8e65-a9ee2430dfc4-service-ca\") pod \"7dd4f70a-3e48-4dcd-8e65-a9ee2430dfc4\" (UID: \"7dd4f70a-3e48-4dcd-8e65-a9ee2430dfc4\") " Dec 13 03:23:50 crc kubenswrapper[5070]: I1213 03:23:50.931826 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/7dd4f70a-3e48-4dcd-8e65-a9ee2430dfc4-console-oauth-config\") pod \"7dd4f70a-3e48-4dcd-8e65-a9ee2430dfc4\" (UID: \"7dd4f70a-3e48-4dcd-8e65-a9ee2430dfc4\") " Dec 13 03:23:50 crc kubenswrapper[5070]: I1213 03:23:50.931869 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/7dd4f70a-3e48-4dcd-8e65-a9ee2430dfc4-console-config\") pod \"7dd4f70a-3e48-4dcd-8e65-a9ee2430dfc4\" (UID: \"7dd4f70a-3e48-4dcd-8e65-a9ee2430dfc4\") " Dec 13 03:23:50 crc kubenswrapper[5070]: I1213 03:23:50.931960 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c9dbq\" (UniqueName: \"kubernetes.io/projected/7dd4f70a-3e48-4dcd-8e65-a9ee2430dfc4-kube-api-access-c9dbq\") pod \"7dd4f70a-3e48-4dcd-8e65-a9ee2430dfc4\" (UID: \"7dd4f70a-3e48-4dcd-8e65-a9ee2430dfc4\") " Dec 13 03:23:50 crc kubenswrapper[5070]: I1213 03:23:50.932139 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7dd4f70a-3e48-4dcd-8e65-a9ee2430dfc4-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "7dd4f70a-3e48-4dcd-8e65-a9ee2430dfc4" (UID: "7dd4f70a-3e48-4dcd-8e65-a9ee2430dfc4"). InnerVolumeSpecName "trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:23:50 crc kubenswrapper[5070]: I1213 03:23:50.932356 5070 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7dd4f70a-3e48-4dcd-8e65-a9ee2430dfc4-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 03:23:50 crc kubenswrapper[5070]: I1213 03:23:50.932771 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7dd4f70a-3e48-4dcd-8e65-a9ee2430dfc4-console-config" (OuterVolumeSpecName: "console-config") pod "7dd4f70a-3e48-4dcd-8e65-a9ee2430dfc4" (UID: "7dd4f70a-3e48-4dcd-8e65-a9ee2430dfc4"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:23:50 crc kubenswrapper[5070]: I1213 03:23:50.933030 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7dd4f70a-3e48-4dcd-8e65-a9ee2430dfc4-service-ca" (OuterVolumeSpecName: "service-ca") pod "7dd4f70a-3e48-4dcd-8e65-a9ee2430dfc4" (UID: "7dd4f70a-3e48-4dcd-8e65-a9ee2430dfc4"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:23:50 crc kubenswrapper[5070]: I1213 03:23:50.933116 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7dd4f70a-3e48-4dcd-8e65-a9ee2430dfc4-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "7dd4f70a-3e48-4dcd-8e65-a9ee2430dfc4" (UID: "7dd4f70a-3e48-4dcd-8e65-a9ee2430dfc4"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:23:50 crc kubenswrapper[5070]: I1213 03:23:50.936832 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7dd4f70a-3e48-4dcd-8e65-a9ee2430dfc4-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "7dd4f70a-3e48-4dcd-8e65-a9ee2430dfc4" (UID: "7dd4f70a-3e48-4dcd-8e65-a9ee2430dfc4"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:23:50 crc kubenswrapper[5070]: I1213 03:23:50.936962 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7dd4f70a-3e48-4dcd-8e65-a9ee2430dfc4-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "7dd4f70a-3e48-4dcd-8e65-a9ee2430dfc4" (UID: "7dd4f70a-3e48-4dcd-8e65-a9ee2430dfc4"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:23:50 crc kubenswrapper[5070]: I1213 03:23:50.937676 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7dd4f70a-3e48-4dcd-8e65-a9ee2430dfc4-kube-api-access-c9dbq" (OuterVolumeSpecName: "kube-api-access-c9dbq") pod "7dd4f70a-3e48-4dcd-8e65-a9ee2430dfc4" (UID: "7dd4f70a-3e48-4dcd-8e65-a9ee2430dfc4"). InnerVolumeSpecName "kube-api-access-c9dbq". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:23:51 crc kubenswrapper[5070]: I1213 03:23:51.033609 5070 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/7dd4f70a-3e48-4dcd-8e65-a9ee2430dfc4-console-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 13 03:23:51 crc kubenswrapper[5070]: I1213 03:23:51.033681 5070 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/7dd4f70a-3e48-4dcd-8e65-a9ee2430dfc4-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 13 03:23:51 crc kubenswrapper[5070]: I1213 03:23:51.033693 5070 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/7dd4f70a-3e48-4dcd-8e65-a9ee2430dfc4-service-ca\") on node \"crc\" DevicePath \"\"" Dec 13 03:23:51 crc kubenswrapper[5070]: I1213 03:23:51.033701 5070 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/7dd4f70a-3e48-4dcd-8e65-a9ee2430dfc4-console-oauth-config\") on node \"crc\" DevicePath \"\"" Dec 13 03:23:51 crc kubenswrapper[5070]: I1213 03:23:51.033711 5070 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/7dd4f70a-3e48-4dcd-8e65-a9ee2430dfc4-console-config\") on node \"crc\" DevicePath \"\"" Dec 13 03:23:51 crc kubenswrapper[5070]: I1213 03:23:51.033718 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c9dbq\" (UniqueName: \"kubernetes.io/projected/7dd4f70a-3e48-4dcd-8e65-a9ee2430dfc4-kube-api-access-c9dbq\") on node \"crc\" DevicePath \"\"" Dec 13 03:23:51 crc kubenswrapper[5070]: I1213 03:23:51.497764 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-z6jvd_7dd4f70a-3e48-4dcd-8e65-a9ee2430dfc4/console/0.log" Dec 13 03:23:51 crc kubenswrapper[5070]: I1213 03:23:51.497848 5070 generic.go:334] "Generic (PLEG): container finished" podID="7dd4f70a-3e48-4dcd-8e65-a9ee2430dfc4" containerID="a003fda99e7b1776078b1e821d83e616b36f25462327fb6e4aa2ddf50ac231b7" exitCode=2 Dec 13 03:23:51 crc kubenswrapper[5070]: I1213 03:23:51.497950 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-z6jvd" event={"ID":"7dd4f70a-3e48-4dcd-8e65-a9ee2430dfc4","Type":"ContainerDied","Data":"a003fda99e7b1776078b1e821d83e616b36f25462327fb6e4aa2ddf50ac231b7"} Dec 13 03:23:51 crc kubenswrapper[5070]: I1213 03:23:51.497956 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-z6jvd" Dec 13 03:23:51 crc kubenswrapper[5070]: I1213 03:23:51.498651 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-z6jvd" event={"ID":"7dd4f70a-3e48-4dcd-8e65-a9ee2430dfc4","Type":"ContainerDied","Data":"bb4175f0ff80d861a7326af9daada06b5b3e0c4fa4c548892966a1ced6fc4ba0"} Dec 13 03:23:51 crc kubenswrapper[5070]: I1213 03:23:51.498699 5070 scope.go:117] "RemoveContainer" containerID="a003fda99e7b1776078b1e821d83e616b36f25462327fb6e4aa2ddf50ac231b7" Dec 13 03:23:51 crc kubenswrapper[5070]: I1213 03:23:51.502990 5070 generic.go:334] "Generic (PLEG): container finished" podID="b7800c1a-3a8e-4647-91be-97515b5c094c" containerID="1a168fdc62afed76f6255b38274ed1e7e2f3abdd1c2f8a4170512786b2d387db" exitCode=0 Dec 13 03:23:51 crc kubenswrapper[5070]: I1213 03:23:51.503051 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4kcf9n" event={"ID":"b7800c1a-3a8e-4647-91be-97515b5c094c","Type":"ContainerDied","Data":"1a168fdc62afed76f6255b38274ed1e7e2f3abdd1c2f8a4170512786b2d387db"} Dec 13 03:23:51 crc kubenswrapper[5070]: I1213 03:23:51.523558 5070 scope.go:117] "RemoveContainer" containerID="a003fda99e7b1776078b1e821d83e616b36f25462327fb6e4aa2ddf50ac231b7" Dec 13 03:23:51 crc kubenswrapper[5070]: E1213 03:23:51.524088 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a003fda99e7b1776078b1e821d83e616b36f25462327fb6e4aa2ddf50ac231b7\": container with ID starting with a003fda99e7b1776078b1e821d83e616b36f25462327fb6e4aa2ddf50ac231b7 not found: ID does not exist" containerID="a003fda99e7b1776078b1e821d83e616b36f25462327fb6e4aa2ddf50ac231b7" Dec 13 03:23:51 crc kubenswrapper[5070]: I1213 03:23:51.524149 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a003fda99e7b1776078b1e821d83e616b36f25462327fb6e4aa2ddf50ac231b7"} err="failed to get container status \"a003fda99e7b1776078b1e821d83e616b36f25462327fb6e4aa2ddf50ac231b7\": rpc error: code = NotFound desc = could not find container \"a003fda99e7b1776078b1e821d83e616b36f25462327fb6e4aa2ddf50ac231b7\": container with ID starting with a003fda99e7b1776078b1e821d83e616b36f25462327fb6e4aa2ddf50ac231b7 not found: ID does not exist" Dec 13 03:23:51 crc kubenswrapper[5070]: I1213 03:23:51.535564 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-z6jvd"] Dec 13 03:23:51 crc kubenswrapper[5070]: I1213 03:23:51.539071 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-f9d7485db-z6jvd"] Dec 13 03:23:52 crc kubenswrapper[5070]: I1213 03:23:52.176413 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7dd4f70a-3e48-4dcd-8e65-a9ee2430dfc4" path="/var/lib/kubelet/pods/7dd4f70a-3e48-4dcd-8e65-a9ee2430dfc4/volumes" Dec 13 03:23:53 crc kubenswrapper[5070]: I1213 03:23:53.520794 5070 generic.go:334] "Generic (PLEG): container finished" podID="b7800c1a-3a8e-4647-91be-97515b5c094c" containerID="a32e75a5059e5ac313053491229ed6c73b4b1b6d1b5b237c821a3272210f4069" exitCode=0 Dec 13 03:23:53 crc kubenswrapper[5070]: I1213 03:23:53.520852 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4kcf9n" 
event={"ID":"b7800c1a-3a8e-4647-91be-97515b5c094c","Type":"ContainerDied","Data":"a32e75a5059e5ac313053491229ed6c73b4b1b6d1b5b237c821a3272210f4069"} Dec 13 03:23:54 crc kubenswrapper[5070]: I1213 03:23:54.528387 5070 generic.go:334] "Generic (PLEG): container finished" podID="b7800c1a-3a8e-4647-91be-97515b5c094c" containerID="5211f8ed7989a05bf8ce58ad208762edeaba2dcaefaf7a3b0b4fbf7e81c9a984" exitCode=0 Dec 13 03:23:54 crc kubenswrapper[5070]: I1213 03:23:54.528484 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4kcf9n" event={"ID":"b7800c1a-3a8e-4647-91be-97515b5c094c","Type":"ContainerDied","Data":"5211f8ed7989a05bf8ce58ad208762edeaba2dcaefaf7a3b0b4fbf7e81c9a984"} Dec 13 03:23:55 crc kubenswrapper[5070]: I1213 03:23:55.760817 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4kcf9n" Dec 13 03:23:55 crc kubenswrapper[5070]: I1213 03:23:55.897893 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5bfl6\" (UniqueName: \"kubernetes.io/projected/b7800c1a-3a8e-4647-91be-97515b5c094c-kube-api-access-5bfl6\") pod \"b7800c1a-3a8e-4647-91be-97515b5c094c\" (UID: \"b7800c1a-3a8e-4647-91be-97515b5c094c\") " Dec 13 03:23:55 crc kubenswrapper[5070]: I1213 03:23:55.897992 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/b7800c1a-3a8e-4647-91be-97515b5c094c-util\") pod \"b7800c1a-3a8e-4647-91be-97515b5c094c\" (UID: \"b7800c1a-3a8e-4647-91be-97515b5c094c\") " Dec 13 03:23:55 crc kubenswrapper[5070]: I1213 03:23:55.898021 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/b7800c1a-3a8e-4647-91be-97515b5c094c-bundle\") pod \"b7800c1a-3a8e-4647-91be-97515b5c094c\" (UID: \"b7800c1a-3a8e-4647-91be-97515b5c094c\") " Dec 13 03:23:55 crc kubenswrapper[5070]: I1213 03:23:55.899047 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b7800c1a-3a8e-4647-91be-97515b5c094c-bundle" (OuterVolumeSpecName: "bundle") pod "b7800c1a-3a8e-4647-91be-97515b5c094c" (UID: "b7800c1a-3a8e-4647-91be-97515b5c094c"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 03:23:55 crc kubenswrapper[5070]: I1213 03:23:55.903075 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b7800c1a-3a8e-4647-91be-97515b5c094c-kube-api-access-5bfl6" (OuterVolumeSpecName: "kube-api-access-5bfl6") pod "b7800c1a-3a8e-4647-91be-97515b5c094c" (UID: "b7800c1a-3a8e-4647-91be-97515b5c094c"). InnerVolumeSpecName "kube-api-access-5bfl6". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:23:55 crc kubenswrapper[5070]: I1213 03:23:55.999279 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5bfl6\" (UniqueName: \"kubernetes.io/projected/b7800c1a-3a8e-4647-91be-97515b5c094c-kube-api-access-5bfl6\") on node \"crc\" DevicePath \"\"" Dec 13 03:23:55 crc kubenswrapper[5070]: I1213 03:23:55.999308 5070 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/b7800c1a-3a8e-4647-91be-97515b5c094c-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 03:23:56 crc kubenswrapper[5070]: I1213 03:23:56.148866 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b7800c1a-3a8e-4647-91be-97515b5c094c-util" (OuterVolumeSpecName: "util") pod "b7800c1a-3a8e-4647-91be-97515b5c094c" (UID: "b7800c1a-3a8e-4647-91be-97515b5c094c"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 03:23:56 crc kubenswrapper[5070]: I1213 03:23:56.201987 5070 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/b7800c1a-3a8e-4647-91be-97515b5c094c-util\") on node \"crc\" DevicePath \"\"" Dec 13 03:23:56 crc kubenswrapper[5070]: I1213 03:23:56.539748 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4kcf9n" event={"ID":"b7800c1a-3a8e-4647-91be-97515b5c094c","Type":"ContainerDied","Data":"e89c55cfc99a0ba582fd1cbc187a9df4b1df319b9a1efa9cf02d873c5530eadd"} Dec 13 03:23:56 crc kubenswrapper[5070]: I1213 03:23:56.539793 5070 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e89c55cfc99a0ba582fd1cbc187a9df4b1df319b9a1efa9cf02d873c5530eadd" Dec 13 03:23:56 crc kubenswrapper[5070]: I1213 03:23:56.539798 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4kcf9n" Dec 13 03:24:08 crc kubenswrapper[5070]: I1213 03:24:08.454707 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-58d87b8dcc-6wpw7"] Dec 13 03:24:08 crc kubenswrapper[5070]: E1213 03:24:08.455539 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b7800c1a-3a8e-4647-91be-97515b5c094c" containerName="pull" Dec 13 03:24:08 crc kubenswrapper[5070]: I1213 03:24:08.455555 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="b7800c1a-3a8e-4647-91be-97515b5c094c" containerName="pull" Dec 13 03:24:08 crc kubenswrapper[5070]: E1213 03:24:08.455570 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b7800c1a-3a8e-4647-91be-97515b5c094c" containerName="extract" Dec 13 03:24:08 crc kubenswrapper[5070]: I1213 03:24:08.455578 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="b7800c1a-3a8e-4647-91be-97515b5c094c" containerName="extract" Dec 13 03:24:08 crc kubenswrapper[5070]: E1213 03:24:08.455588 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b7800c1a-3a8e-4647-91be-97515b5c094c" containerName="util" Dec 13 03:24:08 crc kubenswrapper[5070]: I1213 03:24:08.455596 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="b7800c1a-3a8e-4647-91be-97515b5c094c" containerName="util" Dec 13 03:24:08 crc kubenswrapper[5070]: E1213 03:24:08.455609 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7dd4f70a-3e48-4dcd-8e65-a9ee2430dfc4" containerName="console" Dec 13 03:24:08 crc kubenswrapper[5070]: I1213 03:24:08.455616 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="7dd4f70a-3e48-4dcd-8e65-a9ee2430dfc4" containerName="console" Dec 13 03:24:08 crc kubenswrapper[5070]: I1213 03:24:08.455754 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="7dd4f70a-3e48-4dcd-8e65-a9ee2430dfc4" containerName="console" Dec 13 03:24:08 crc kubenswrapper[5070]: I1213 03:24:08.455766 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="b7800c1a-3a8e-4647-91be-97515b5c094c" containerName="extract" Dec 13 03:24:08 crc kubenswrapper[5070]: I1213 03:24:08.456247 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-58d87b8dcc-6wpw7" Dec 13 03:24:08 crc kubenswrapper[5070]: I1213 03:24:08.458682 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt" Dec 13 03:24:08 crc kubenswrapper[5070]: I1213 03:24:08.458898 5070 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert" Dec 13 03:24:08 crc kubenswrapper[5070]: I1213 03:24:08.458902 5070 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-drcb4" Dec 13 03:24:08 crc kubenswrapper[5070]: I1213 03:24:08.459544 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt" Dec 13 03:24:08 crc kubenswrapper[5070]: I1213 03:24:08.459769 5070 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert" Dec 13 03:24:08 crc kubenswrapper[5070]: I1213 03:24:08.481625 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-58d87b8dcc-6wpw7"] Dec 13 03:24:08 crc kubenswrapper[5070]: I1213 03:24:08.563880 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/e944687d-2c42-446f-bc22-a7885909cead-apiservice-cert\") pod \"metallb-operator-controller-manager-58d87b8dcc-6wpw7\" (UID: \"e944687d-2c42-446f-bc22-a7885909cead\") " pod="metallb-system/metallb-operator-controller-manager-58d87b8dcc-6wpw7" Dec 13 03:24:08 crc kubenswrapper[5070]: I1213 03:24:08.563973 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-npcsp\" (UniqueName: \"kubernetes.io/projected/e944687d-2c42-446f-bc22-a7885909cead-kube-api-access-npcsp\") pod \"metallb-operator-controller-manager-58d87b8dcc-6wpw7\" (UID: \"e944687d-2c42-446f-bc22-a7885909cead\") " pod="metallb-system/metallb-operator-controller-manager-58d87b8dcc-6wpw7" Dec 13 03:24:08 crc kubenswrapper[5070]: I1213 03:24:08.564053 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/e944687d-2c42-446f-bc22-a7885909cead-webhook-cert\") pod \"metallb-operator-controller-manager-58d87b8dcc-6wpw7\" (UID: \"e944687d-2c42-446f-bc22-a7885909cead\") " pod="metallb-system/metallb-operator-controller-manager-58d87b8dcc-6wpw7" Dec 13 03:24:08 crc kubenswrapper[5070]: I1213 03:24:08.665698 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-npcsp\" (UniqueName: \"kubernetes.io/projected/e944687d-2c42-446f-bc22-a7885909cead-kube-api-access-npcsp\") pod \"metallb-operator-controller-manager-58d87b8dcc-6wpw7\" (UID: \"e944687d-2c42-446f-bc22-a7885909cead\") " pod="metallb-system/metallb-operator-controller-manager-58d87b8dcc-6wpw7" Dec 13 03:24:08 crc kubenswrapper[5070]: I1213 03:24:08.665787 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/e944687d-2c42-446f-bc22-a7885909cead-webhook-cert\") pod \"metallb-operator-controller-manager-58d87b8dcc-6wpw7\" (UID: \"e944687d-2c42-446f-bc22-a7885909cead\") " pod="metallb-system/metallb-operator-controller-manager-58d87b8dcc-6wpw7" Dec 13 03:24:08 crc kubenswrapper[5070]: I1213 03:24:08.665850 5070 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/e944687d-2c42-446f-bc22-a7885909cead-apiservice-cert\") pod \"metallb-operator-controller-manager-58d87b8dcc-6wpw7\" (UID: \"e944687d-2c42-446f-bc22-a7885909cead\") " pod="metallb-system/metallb-operator-controller-manager-58d87b8dcc-6wpw7" Dec 13 03:24:08 crc kubenswrapper[5070]: I1213 03:24:08.671750 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/e944687d-2c42-446f-bc22-a7885909cead-webhook-cert\") pod \"metallb-operator-controller-manager-58d87b8dcc-6wpw7\" (UID: \"e944687d-2c42-446f-bc22-a7885909cead\") " pod="metallb-system/metallb-operator-controller-manager-58d87b8dcc-6wpw7" Dec 13 03:24:08 crc kubenswrapper[5070]: I1213 03:24:08.684370 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/e944687d-2c42-446f-bc22-a7885909cead-apiservice-cert\") pod \"metallb-operator-controller-manager-58d87b8dcc-6wpw7\" (UID: \"e944687d-2c42-446f-bc22-a7885909cead\") " pod="metallb-system/metallb-operator-controller-manager-58d87b8dcc-6wpw7" Dec 13 03:24:08 crc kubenswrapper[5070]: I1213 03:24:08.693111 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-npcsp\" (UniqueName: \"kubernetes.io/projected/e944687d-2c42-446f-bc22-a7885909cead-kube-api-access-npcsp\") pod \"metallb-operator-controller-manager-58d87b8dcc-6wpw7\" (UID: \"e944687d-2c42-446f-bc22-a7885909cead\") " pod="metallb-system/metallb-operator-controller-manager-58d87b8dcc-6wpw7" Dec 13 03:24:08 crc kubenswrapper[5070]: I1213 03:24:08.772957 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-58d87b8dcc-6wpw7" Dec 13 03:24:08 crc kubenswrapper[5070]: I1213 03:24:08.914499 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-6f764c67c4-2kqc9"] Dec 13 03:24:08 crc kubenswrapper[5070]: I1213 03:24:08.917500 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-6f764c67c4-2kqc9" Dec 13 03:24:08 crc kubenswrapper[5070]: I1213 03:24:08.921814 5070 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-w9k5b" Dec 13 03:24:08 crc kubenswrapper[5070]: I1213 03:24:08.933037 5070 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert" Dec 13 03:24:08 crc kubenswrapper[5070]: I1213 03:24:08.946836 5070 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Dec 13 03:24:08 crc kubenswrapper[5070]: I1213 03:24:08.952615 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-6f764c67c4-2kqc9"] Dec 13 03:24:09 crc kubenswrapper[5070]: I1213 03:24:09.071305 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/68ca8795-fc30-4cbe-a185-80555ab011d1-apiservice-cert\") pod \"metallb-operator-webhook-server-6f764c67c4-2kqc9\" (UID: \"68ca8795-fc30-4cbe-a185-80555ab011d1\") " pod="metallb-system/metallb-operator-webhook-server-6f764c67c4-2kqc9" Dec 13 03:24:09 crc kubenswrapper[5070]: I1213 03:24:09.071752 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5khvt\" (UniqueName: \"kubernetes.io/projected/68ca8795-fc30-4cbe-a185-80555ab011d1-kube-api-access-5khvt\") pod \"metallb-operator-webhook-server-6f764c67c4-2kqc9\" (UID: \"68ca8795-fc30-4cbe-a185-80555ab011d1\") " pod="metallb-system/metallb-operator-webhook-server-6f764c67c4-2kqc9" Dec 13 03:24:09 crc kubenswrapper[5070]: I1213 03:24:09.071816 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/68ca8795-fc30-4cbe-a185-80555ab011d1-webhook-cert\") pod \"metallb-operator-webhook-server-6f764c67c4-2kqc9\" (UID: \"68ca8795-fc30-4cbe-a185-80555ab011d1\") " pod="metallb-system/metallb-operator-webhook-server-6f764c67c4-2kqc9" Dec 13 03:24:09 crc kubenswrapper[5070]: I1213 03:24:09.173405 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5khvt\" (UniqueName: \"kubernetes.io/projected/68ca8795-fc30-4cbe-a185-80555ab011d1-kube-api-access-5khvt\") pod \"metallb-operator-webhook-server-6f764c67c4-2kqc9\" (UID: \"68ca8795-fc30-4cbe-a185-80555ab011d1\") " pod="metallb-system/metallb-operator-webhook-server-6f764c67c4-2kqc9" Dec 13 03:24:09 crc kubenswrapper[5070]: I1213 03:24:09.173512 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/68ca8795-fc30-4cbe-a185-80555ab011d1-webhook-cert\") pod \"metallb-operator-webhook-server-6f764c67c4-2kqc9\" (UID: \"68ca8795-fc30-4cbe-a185-80555ab011d1\") " pod="metallb-system/metallb-operator-webhook-server-6f764c67c4-2kqc9" Dec 13 03:24:09 crc kubenswrapper[5070]: I1213 03:24:09.173574 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/68ca8795-fc30-4cbe-a185-80555ab011d1-apiservice-cert\") pod \"metallb-operator-webhook-server-6f764c67c4-2kqc9\" (UID: \"68ca8795-fc30-4cbe-a185-80555ab011d1\") " pod="metallb-system/metallb-operator-webhook-server-6f764c67c4-2kqc9" Dec 13 03:24:09 crc kubenswrapper[5070]: I1213 
03:24:09.182708 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/68ca8795-fc30-4cbe-a185-80555ab011d1-webhook-cert\") pod \"metallb-operator-webhook-server-6f764c67c4-2kqc9\" (UID: \"68ca8795-fc30-4cbe-a185-80555ab011d1\") " pod="metallb-system/metallb-operator-webhook-server-6f764c67c4-2kqc9" Dec 13 03:24:09 crc kubenswrapper[5070]: I1213 03:24:09.186156 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/68ca8795-fc30-4cbe-a185-80555ab011d1-apiservice-cert\") pod \"metallb-operator-webhook-server-6f764c67c4-2kqc9\" (UID: \"68ca8795-fc30-4cbe-a185-80555ab011d1\") " pod="metallb-system/metallb-operator-webhook-server-6f764c67c4-2kqc9" Dec 13 03:24:09 crc kubenswrapper[5070]: I1213 03:24:09.197079 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5khvt\" (UniqueName: \"kubernetes.io/projected/68ca8795-fc30-4cbe-a185-80555ab011d1-kube-api-access-5khvt\") pod \"metallb-operator-webhook-server-6f764c67c4-2kqc9\" (UID: \"68ca8795-fc30-4cbe-a185-80555ab011d1\") " pod="metallb-system/metallb-operator-webhook-server-6f764c67c4-2kqc9" Dec 13 03:24:09 crc kubenswrapper[5070]: I1213 03:24:09.255020 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-6f764c67c4-2kqc9" Dec 13 03:24:09 crc kubenswrapper[5070]: I1213 03:24:09.354547 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-58d87b8dcc-6wpw7"] Dec 13 03:24:09 crc kubenswrapper[5070]: I1213 03:24:09.517241 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-6f764c67c4-2kqc9"] Dec 13 03:24:09 crc kubenswrapper[5070]: W1213 03:24:09.521916 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod68ca8795_fc30_4cbe_a185_80555ab011d1.slice/crio-1759ea3dc938c1ee287c3ac50551ce4e61e2a52eff6a8c2d09c7534760f115ba WatchSource:0}: Error finding container 1759ea3dc938c1ee287c3ac50551ce4e61e2a52eff6a8c2d09c7534760f115ba: Status 404 returned error can't find the container with id 1759ea3dc938c1ee287c3ac50551ce4e61e2a52eff6a8c2d09c7534760f115ba Dec 13 03:24:09 crc kubenswrapper[5070]: I1213 03:24:09.608309 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-6f764c67c4-2kqc9" event={"ID":"68ca8795-fc30-4cbe-a185-80555ab011d1","Type":"ContainerStarted","Data":"1759ea3dc938c1ee287c3ac50551ce4e61e2a52eff6a8c2d09c7534760f115ba"} Dec 13 03:24:09 crc kubenswrapper[5070]: I1213 03:24:09.610096 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-58d87b8dcc-6wpw7" event={"ID":"e944687d-2c42-446f-bc22-a7885909cead","Type":"ContainerStarted","Data":"d447360c84db913d92447cf6c8c45bff18ee02002f53054d14d43dd4832493b9"} Dec 13 03:24:17 crc kubenswrapper[5070]: I1213 03:24:17.662026 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-6f764c67c4-2kqc9" event={"ID":"68ca8795-fc30-4cbe-a185-80555ab011d1","Type":"ContainerStarted","Data":"8e184d668c25b6e5fdbe8ec294180885f705c2b076577ac8f31b6fd4083b3f6c"} Dec 13 03:24:17 crc kubenswrapper[5070]: I1213 03:24:17.662548 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="metallb-system/metallb-operator-webhook-server-6f764c67c4-2kqc9" Dec 13 03:24:17 crc kubenswrapper[5070]: I1213 03:24:17.663405 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-58d87b8dcc-6wpw7" event={"ID":"e944687d-2c42-446f-bc22-a7885909cead","Type":"ContainerStarted","Data":"d46ace88a1ef5a462e21952fca997e20509673b882bc4d062cfa2b5202c5ec36"} Dec 13 03:24:17 crc kubenswrapper[5070]: I1213 03:24:17.663551 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-controller-manager-58d87b8dcc-6wpw7" Dec 13 03:24:17 crc kubenswrapper[5070]: I1213 03:24:17.682179 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-webhook-server-6f764c67c4-2kqc9" podStartSLOduration=2.206096285 podStartE2EDuration="9.682162357s" podCreationTimestamp="2025-12-13 03:24:08 +0000 UTC" firstStartedPulling="2025-12-13 03:24:09.524693865 +0000 UTC m=+741.760537411" lastFinishedPulling="2025-12-13 03:24:17.000759937 +0000 UTC m=+749.236603483" observedRunningTime="2025-12-13 03:24:17.680059799 +0000 UTC m=+749.915903335" watchObservedRunningTime="2025-12-13 03:24:17.682162357 +0000 UTC m=+749.918005903" Dec 13 03:24:17 crc kubenswrapper[5070]: I1213 03:24:17.714467 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-58d87b8dcc-6wpw7" podStartSLOduration=2.092339087 podStartE2EDuration="9.714426609s" podCreationTimestamp="2025-12-13 03:24:08 +0000 UTC" firstStartedPulling="2025-12-13 03:24:09.363038957 +0000 UTC m=+741.598882503" lastFinishedPulling="2025-12-13 03:24:16.985126479 +0000 UTC m=+749.220970025" observedRunningTime="2025-12-13 03:24:17.712053354 +0000 UTC m=+749.947896950" watchObservedRunningTime="2025-12-13 03:24:17.714426609 +0000 UTC m=+749.950270165" Dec 13 03:24:29 crc kubenswrapper[5070]: I1213 03:24:29.260688 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-6f764c67c4-2kqc9" Dec 13 03:24:48 crc kubenswrapper[5070]: I1213 03:24:48.776702 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-58d87b8dcc-6wpw7" Dec 13 03:24:49 crc kubenswrapper[5070]: I1213 03:24:49.562139 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-7784b6fcf-jg4tq"] Dec 13 03:24:49 crc kubenswrapper[5070]: I1213 03:24:49.563147 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7784b6fcf-jg4tq" Dec 13 03:24:49 crc kubenswrapper[5070]: I1213 03:24:49.565250 5070 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-8z7hp" Dec 13 03:24:49 crc kubenswrapper[5070]: I1213 03:24:49.565574 5070 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert" Dec 13 03:24:49 crc kubenswrapper[5070]: I1213 03:24:49.565910 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-4s5j9"] Dec 13 03:24:49 crc kubenswrapper[5070]: I1213 03:24:49.567926 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-4s5j9" Dec 13 03:24:49 crc kubenswrapper[5070]: I1213 03:24:49.584000 5070 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret" Dec 13 03:24:49 crc kubenswrapper[5070]: I1213 03:24:49.584026 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup" Dec 13 03:24:49 crc kubenswrapper[5070]: I1213 03:24:49.604669 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7784b6fcf-jg4tq"] Dec 13 03:24:49 crc kubenswrapper[5070]: I1213 03:24:49.644855 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/26e460ad-a509-4c45-a5a7-64cd87d2a5f0-metrics-certs\") pod \"frr-k8s-4s5j9\" (UID: \"26e460ad-a509-4c45-a5a7-64cd87d2a5f0\") " pod="metallb-system/frr-k8s-4s5j9" Dec 13 03:24:49 crc kubenswrapper[5070]: I1213 03:24:49.644917 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/26e460ad-a509-4c45-a5a7-64cd87d2a5f0-frr-sockets\") pod \"frr-k8s-4s5j9\" (UID: \"26e460ad-a509-4c45-a5a7-64cd87d2a5f0\") " pod="metallb-system/frr-k8s-4s5j9" Dec 13 03:24:49 crc kubenswrapper[5070]: I1213 03:24:49.644958 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dxb8z\" (UniqueName: \"kubernetes.io/projected/64134541-9479-4e28-9408-092333fa9e08-kube-api-access-dxb8z\") pod \"frr-k8s-webhook-server-7784b6fcf-jg4tq\" (UID: \"64134541-9479-4e28-9408-092333fa9e08\") " pod="metallb-system/frr-k8s-webhook-server-7784b6fcf-jg4tq" Dec 13 03:24:49 crc kubenswrapper[5070]: I1213 03:24:49.644992 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/26e460ad-a509-4c45-a5a7-64cd87d2a5f0-frr-startup\") pod \"frr-k8s-4s5j9\" (UID: \"26e460ad-a509-4c45-a5a7-64cd87d2a5f0\") " pod="metallb-system/frr-k8s-4s5j9" Dec 13 03:24:49 crc kubenswrapper[5070]: I1213 03:24:49.645016 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tckmh\" (UniqueName: \"kubernetes.io/projected/26e460ad-a509-4c45-a5a7-64cd87d2a5f0-kube-api-access-tckmh\") pod \"frr-k8s-4s5j9\" (UID: \"26e460ad-a509-4c45-a5a7-64cd87d2a5f0\") " pod="metallb-system/frr-k8s-4s5j9" Dec 13 03:24:49 crc kubenswrapper[5070]: I1213 03:24:49.645049 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/26e460ad-a509-4c45-a5a7-64cd87d2a5f0-metrics\") pod \"frr-k8s-4s5j9\" (UID: \"26e460ad-a509-4c45-a5a7-64cd87d2a5f0\") " pod="metallb-system/frr-k8s-4s5j9" Dec 13 03:24:49 crc kubenswrapper[5070]: I1213 03:24:49.645103 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/26e460ad-a509-4c45-a5a7-64cd87d2a5f0-reloader\") pod \"frr-k8s-4s5j9\" (UID: \"26e460ad-a509-4c45-a5a7-64cd87d2a5f0\") " pod="metallb-system/frr-k8s-4s5j9" Dec 13 03:24:49 crc kubenswrapper[5070]: I1213 03:24:49.645138 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/26e460ad-a509-4c45-a5a7-64cd87d2a5f0-frr-conf\") 
pod \"frr-k8s-4s5j9\" (UID: \"26e460ad-a509-4c45-a5a7-64cd87d2a5f0\") " pod="metallb-system/frr-k8s-4s5j9" Dec 13 03:24:49 crc kubenswrapper[5070]: I1213 03:24:49.645161 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/64134541-9479-4e28-9408-092333fa9e08-cert\") pod \"frr-k8s-webhook-server-7784b6fcf-jg4tq\" (UID: \"64134541-9479-4e28-9408-092333fa9e08\") " pod="metallb-system/frr-k8s-webhook-server-7784b6fcf-jg4tq" Dec 13 03:24:49 crc kubenswrapper[5070]: I1213 03:24:49.682634 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-8kznn"] Dec 13 03:24:49 crc kubenswrapper[5070]: I1213 03:24:49.683888 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-8kznn" Dec 13 03:24:49 crc kubenswrapper[5070]: I1213 03:24:49.691586 5070 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-vg9p2" Dec 13 03:24:49 crc kubenswrapper[5070]: I1213 03:24:49.691767 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2" Dec 13 03:24:49 crc kubenswrapper[5070]: I1213 03:24:49.691855 5070 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist" Dec 13 03:24:49 crc kubenswrapper[5070]: I1213 03:24:49.691951 5070 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret" Dec 13 03:24:49 crc kubenswrapper[5070]: I1213 03:24:49.697426 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-5bddd4b946-wmrgw"] Dec 13 03:24:49 crc kubenswrapper[5070]: I1213 03:24:49.698602 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-5bddd4b946-wmrgw" Dec 13 03:24:49 crc kubenswrapper[5070]: I1213 03:24:49.706623 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-5bddd4b946-wmrgw"] Dec 13 03:24:49 crc kubenswrapper[5070]: I1213 03:24:49.706918 5070 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret" Dec 13 03:24:49 crc kubenswrapper[5070]: I1213 03:24:49.745976 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dxb8z\" (UniqueName: \"kubernetes.io/projected/64134541-9479-4e28-9408-092333fa9e08-kube-api-access-dxb8z\") pod \"frr-k8s-webhook-server-7784b6fcf-jg4tq\" (UID: \"64134541-9479-4e28-9408-092333fa9e08\") " pod="metallb-system/frr-k8s-webhook-server-7784b6fcf-jg4tq" Dec 13 03:24:49 crc kubenswrapper[5070]: I1213 03:24:49.746028 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/26e460ad-a509-4c45-a5a7-64cd87d2a5f0-frr-startup\") pod \"frr-k8s-4s5j9\" (UID: \"26e460ad-a509-4c45-a5a7-64cd87d2a5f0\") " pod="metallb-system/frr-k8s-4s5j9" Dec 13 03:24:49 crc kubenswrapper[5070]: I1213 03:24:49.746052 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tckmh\" (UniqueName: \"kubernetes.io/projected/26e460ad-a509-4c45-a5a7-64cd87d2a5f0-kube-api-access-tckmh\") pod \"frr-k8s-4s5j9\" (UID: \"26e460ad-a509-4c45-a5a7-64cd87d2a5f0\") " pod="metallb-system/frr-k8s-4s5j9" Dec 13 03:24:49 crc kubenswrapper[5070]: I1213 03:24:49.746076 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/26e460ad-a509-4c45-a5a7-64cd87d2a5f0-metrics\") pod \"frr-k8s-4s5j9\" (UID: \"26e460ad-a509-4c45-a5a7-64cd87d2a5f0\") " pod="metallb-system/frr-k8s-4s5j9" Dec 13 03:24:49 crc kubenswrapper[5070]: I1213 03:24:49.746112 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9rxnm\" (UniqueName: \"kubernetes.io/projected/6f946df5-dc5e-4b3a-afd7-51209e6dd09b-kube-api-access-9rxnm\") pod \"controller-5bddd4b946-wmrgw\" (UID: \"6f946df5-dc5e-4b3a-afd7-51209e6dd09b\") " pod="metallb-system/controller-5bddd4b946-wmrgw" Dec 13 03:24:49 crc kubenswrapper[5070]: I1213 03:24:49.746128 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/8216ae30-2f9d-47c4-a748-3b0456ff9a27-memberlist\") pod \"speaker-8kznn\" (UID: \"8216ae30-2f9d-47c4-a748-3b0456ff9a27\") " pod="metallb-system/speaker-8kznn" Dec 13 03:24:49 crc kubenswrapper[5070]: I1213 03:24:49.746157 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/26e460ad-a509-4c45-a5a7-64cd87d2a5f0-reloader\") pod \"frr-k8s-4s5j9\" (UID: \"26e460ad-a509-4c45-a5a7-64cd87d2a5f0\") " pod="metallb-system/frr-k8s-4s5j9" Dec 13 03:24:49 crc kubenswrapper[5070]: I1213 03:24:49.746184 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/8216ae30-2f9d-47c4-a748-3b0456ff9a27-metrics-certs\") pod \"speaker-8kznn\" (UID: \"8216ae30-2f9d-47c4-a748-3b0456ff9a27\") " pod="metallb-system/speaker-8kznn" Dec 13 03:24:49 crc kubenswrapper[5070]: I1213 03:24:49.746207 5070 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/8216ae30-2f9d-47c4-a748-3b0456ff9a27-metallb-excludel2\") pod \"speaker-8kznn\" (UID: \"8216ae30-2f9d-47c4-a748-3b0456ff9a27\") " pod="metallb-system/speaker-8kznn" Dec 13 03:24:49 crc kubenswrapper[5070]: I1213 03:24:49.746226 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/26e460ad-a509-4c45-a5a7-64cd87d2a5f0-frr-conf\") pod \"frr-k8s-4s5j9\" (UID: \"26e460ad-a509-4c45-a5a7-64cd87d2a5f0\") " pod="metallb-system/frr-k8s-4s5j9" Dec 13 03:24:49 crc kubenswrapper[5070]: I1213 03:24:49.746244 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/64134541-9479-4e28-9408-092333fa9e08-cert\") pod \"frr-k8s-webhook-server-7784b6fcf-jg4tq\" (UID: \"64134541-9479-4e28-9408-092333fa9e08\") " pod="metallb-system/frr-k8s-webhook-server-7784b6fcf-jg4tq" Dec 13 03:24:49 crc kubenswrapper[5070]: I1213 03:24:49.746261 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2ncgw\" (UniqueName: \"kubernetes.io/projected/8216ae30-2f9d-47c4-a748-3b0456ff9a27-kube-api-access-2ncgw\") pod \"speaker-8kznn\" (UID: \"8216ae30-2f9d-47c4-a748-3b0456ff9a27\") " pod="metallb-system/speaker-8kznn" Dec 13 03:24:49 crc kubenswrapper[5070]: I1213 03:24:49.746277 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/6f946df5-dc5e-4b3a-afd7-51209e6dd09b-cert\") pod \"controller-5bddd4b946-wmrgw\" (UID: \"6f946df5-dc5e-4b3a-afd7-51209e6dd09b\") " pod="metallb-system/controller-5bddd4b946-wmrgw" Dec 13 03:24:49 crc kubenswrapper[5070]: I1213 03:24:49.746296 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/26e460ad-a509-4c45-a5a7-64cd87d2a5f0-metrics-certs\") pod \"frr-k8s-4s5j9\" (UID: \"26e460ad-a509-4c45-a5a7-64cd87d2a5f0\") " pod="metallb-system/frr-k8s-4s5j9" Dec 13 03:24:49 crc kubenswrapper[5070]: I1213 03:24:49.746312 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/26e460ad-a509-4c45-a5a7-64cd87d2a5f0-frr-sockets\") pod \"frr-k8s-4s5j9\" (UID: \"26e460ad-a509-4c45-a5a7-64cd87d2a5f0\") " pod="metallb-system/frr-k8s-4s5j9" Dec 13 03:24:49 crc kubenswrapper[5070]: I1213 03:24:49.746327 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6f946df5-dc5e-4b3a-afd7-51209e6dd09b-metrics-certs\") pod \"controller-5bddd4b946-wmrgw\" (UID: \"6f946df5-dc5e-4b3a-afd7-51209e6dd09b\") " pod="metallb-system/controller-5bddd4b946-wmrgw" Dec 13 03:24:49 crc kubenswrapper[5070]: I1213 03:24:49.747390 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/26e460ad-a509-4c45-a5a7-64cd87d2a5f0-frr-startup\") pod \"frr-k8s-4s5j9\" (UID: \"26e460ad-a509-4c45-a5a7-64cd87d2a5f0\") " pod="metallb-system/frr-k8s-4s5j9" Dec 13 03:24:49 crc kubenswrapper[5070]: I1213 03:24:49.747470 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/26e460ad-a509-4c45-a5a7-64cd87d2a5f0-metrics\") pod \"frr-k8s-4s5j9\" 
(UID: \"26e460ad-a509-4c45-a5a7-64cd87d2a5f0\") " pod="metallb-system/frr-k8s-4s5j9" Dec 13 03:24:49 crc kubenswrapper[5070]: I1213 03:24:49.747592 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/26e460ad-a509-4c45-a5a7-64cd87d2a5f0-frr-conf\") pod \"frr-k8s-4s5j9\" (UID: \"26e460ad-a509-4c45-a5a7-64cd87d2a5f0\") " pod="metallb-system/frr-k8s-4s5j9" Dec 13 03:24:49 crc kubenswrapper[5070]: E1213 03:24:49.747652 5070 secret.go:188] Couldn't get secret metallb-system/frr-k8s-certs-secret: secret "frr-k8s-certs-secret" not found Dec 13 03:24:49 crc kubenswrapper[5070]: E1213 03:24:49.747689 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/26e460ad-a509-4c45-a5a7-64cd87d2a5f0-metrics-certs podName:26e460ad-a509-4c45-a5a7-64cd87d2a5f0 nodeName:}" failed. No retries permitted until 2025-12-13 03:24:50.247678094 +0000 UTC m=+782.483521640 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/26e460ad-a509-4c45-a5a7-64cd87d2a5f0-metrics-certs") pod "frr-k8s-4s5j9" (UID: "26e460ad-a509-4c45-a5a7-64cd87d2a5f0") : secret "frr-k8s-certs-secret" not found Dec 13 03:24:49 crc kubenswrapper[5070]: I1213 03:24:49.747696 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/26e460ad-a509-4c45-a5a7-64cd87d2a5f0-reloader\") pod \"frr-k8s-4s5j9\" (UID: \"26e460ad-a509-4c45-a5a7-64cd87d2a5f0\") " pod="metallb-system/frr-k8s-4s5j9" Dec 13 03:24:49 crc kubenswrapper[5070]: I1213 03:24:49.750065 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/26e460ad-a509-4c45-a5a7-64cd87d2a5f0-frr-sockets\") pod \"frr-k8s-4s5j9\" (UID: \"26e460ad-a509-4c45-a5a7-64cd87d2a5f0\") " pod="metallb-system/frr-k8s-4s5j9" Dec 13 03:24:49 crc kubenswrapper[5070]: I1213 03:24:49.753634 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/64134541-9479-4e28-9408-092333fa9e08-cert\") pod \"frr-k8s-webhook-server-7784b6fcf-jg4tq\" (UID: \"64134541-9479-4e28-9408-092333fa9e08\") " pod="metallb-system/frr-k8s-webhook-server-7784b6fcf-jg4tq" Dec 13 03:24:49 crc kubenswrapper[5070]: I1213 03:24:49.765282 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tckmh\" (UniqueName: \"kubernetes.io/projected/26e460ad-a509-4c45-a5a7-64cd87d2a5f0-kube-api-access-tckmh\") pod \"frr-k8s-4s5j9\" (UID: \"26e460ad-a509-4c45-a5a7-64cd87d2a5f0\") " pod="metallb-system/frr-k8s-4s5j9" Dec 13 03:24:49 crc kubenswrapper[5070]: I1213 03:24:49.766332 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dxb8z\" (UniqueName: \"kubernetes.io/projected/64134541-9479-4e28-9408-092333fa9e08-kube-api-access-dxb8z\") pod \"frr-k8s-webhook-server-7784b6fcf-jg4tq\" (UID: \"64134541-9479-4e28-9408-092333fa9e08\") " pod="metallb-system/frr-k8s-webhook-server-7784b6fcf-jg4tq" Dec 13 03:24:49 crc kubenswrapper[5070]: I1213 03:24:49.847148 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9rxnm\" (UniqueName: \"kubernetes.io/projected/6f946df5-dc5e-4b3a-afd7-51209e6dd09b-kube-api-access-9rxnm\") pod \"controller-5bddd4b946-wmrgw\" (UID: \"6f946df5-dc5e-4b3a-afd7-51209e6dd09b\") " pod="metallb-system/controller-5bddd4b946-wmrgw" Dec 13 03:24:49 crc kubenswrapper[5070]: 
I1213 03:24:49.847212 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/8216ae30-2f9d-47c4-a748-3b0456ff9a27-memberlist\") pod \"speaker-8kznn\" (UID: \"8216ae30-2f9d-47c4-a748-3b0456ff9a27\") " pod="metallb-system/speaker-8kznn" Dec 13 03:24:49 crc kubenswrapper[5070]: I1213 03:24:49.847257 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/8216ae30-2f9d-47c4-a748-3b0456ff9a27-metrics-certs\") pod \"speaker-8kznn\" (UID: \"8216ae30-2f9d-47c4-a748-3b0456ff9a27\") " pod="metallb-system/speaker-8kznn" Dec 13 03:24:49 crc kubenswrapper[5070]: I1213 03:24:49.847284 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/8216ae30-2f9d-47c4-a748-3b0456ff9a27-metallb-excludel2\") pod \"speaker-8kznn\" (UID: \"8216ae30-2f9d-47c4-a748-3b0456ff9a27\") " pod="metallb-system/speaker-8kznn" Dec 13 03:24:49 crc kubenswrapper[5070]: I1213 03:24:49.847318 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2ncgw\" (UniqueName: \"kubernetes.io/projected/8216ae30-2f9d-47c4-a748-3b0456ff9a27-kube-api-access-2ncgw\") pod \"speaker-8kznn\" (UID: \"8216ae30-2f9d-47c4-a748-3b0456ff9a27\") " pod="metallb-system/speaker-8kznn" Dec 13 03:24:49 crc kubenswrapper[5070]: I1213 03:24:49.847339 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/6f946df5-dc5e-4b3a-afd7-51209e6dd09b-cert\") pod \"controller-5bddd4b946-wmrgw\" (UID: \"6f946df5-dc5e-4b3a-afd7-51209e6dd09b\") " pod="metallb-system/controller-5bddd4b946-wmrgw" Dec 13 03:24:49 crc kubenswrapper[5070]: I1213 03:24:49.847380 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6f946df5-dc5e-4b3a-afd7-51209e6dd09b-metrics-certs\") pod \"controller-5bddd4b946-wmrgw\" (UID: \"6f946df5-dc5e-4b3a-afd7-51209e6dd09b\") " pod="metallb-system/controller-5bddd4b946-wmrgw" Dec 13 03:24:49 crc kubenswrapper[5070]: E1213 03:24:49.847402 5070 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Dec 13 03:24:49 crc kubenswrapper[5070]: E1213 03:24:49.847489 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/8216ae30-2f9d-47c4-a748-3b0456ff9a27-memberlist podName:8216ae30-2f9d-47c4-a748-3b0456ff9a27 nodeName:}" failed. No retries permitted until 2025-12-13 03:24:50.347468541 +0000 UTC m=+782.583312087 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/8216ae30-2f9d-47c4-a748-3b0456ff9a27-memberlist") pod "speaker-8kznn" (UID: "8216ae30-2f9d-47c4-a748-3b0456ff9a27") : secret "metallb-memberlist" not found Dec 13 03:24:49 crc kubenswrapper[5070]: E1213 03:24:49.847576 5070 secret.go:188] Couldn't get secret metallb-system/controller-certs-secret: secret "controller-certs-secret" not found Dec 13 03:24:49 crc kubenswrapper[5070]: E1213 03:24:49.847636 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6f946df5-dc5e-4b3a-afd7-51209e6dd09b-metrics-certs podName:6f946df5-dc5e-4b3a-afd7-51209e6dd09b nodeName:}" failed. No retries permitted until 2025-12-13 03:24:50.347616955 +0000 UTC m=+782.583460571 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/6f946df5-dc5e-4b3a-afd7-51209e6dd09b-metrics-certs") pod "controller-5bddd4b946-wmrgw" (UID: "6f946df5-dc5e-4b3a-afd7-51209e6dd09b") : secret "controller-certs-secret" not found Dec 13 03:24:49 crc kubenswrapper[5070]: I1213 03:24:49.848197 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/8216ae30-2f9d-47c4-a748-3b0456ff9a27-metallb-excludel2\") pod \"speaker-8kznn\" (UID: \"8216ae30-2f9d-47c4-a748-3b0456ff9a27\") " pod="metallb-system/speaker-8kznn" Dec 13 03:24:49 crc kubenswrapper[5070]: I1213 03:24:49.848843 5070 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Dec 13 03:24:49 crc kubenswrapper[5070]: I1213 03:24:49.851391 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/8216ae30-2f9d-47c4-a748-3b0456ff9a27-metrics-certs\") pod \"speaker-8kznn\" (UID: \"8216ae30-2f9d-47c4-a748-3b0456ff9a27\") " pod="metallb-system/speaker-8kznn" Dec 13 03:24:49 crc kubenswrapper[5070]: I1213 03:24:49.860856 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/6f946df5-dc5e-4b3a-afd7-51209e6dd09b-cert\") pod \"controller-5bddd4b946-wmrgw\" (UID: \"6f946df5-dc5e-4b3a-afd7-51209e6dd09b\") " pod="metallb-system/controller-5bddd4b946-wmrgw" Dec 13 03:24:49 crc kubenswrapper[5070]: I1213 03:24:49.866968 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9rxnm\" (UniqueName: \"kubernetes.io/projected/6f946df5-dc5e-4b3a-afd7-51209e6dd09b-kube-api-access-9rxnm\") pod \"controller-5bddd4b946-wmrgw\" (UID: \"6f946df5-dc5e-4b3a-afd7-51209e6dd09b\") " pod="metallb-system/controller-5bddd4b946-wmrgw" Dec 13 03:24:49 crc kubenswrapper[5070]: I1213 03:24:49.867425 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2ncgw\" (UniqueName: \"kubernetes.io/projected/8216ae30-2f9d-47c4-a748-3b0456ff9a27-kube-api-access-2ncgw\") pod \"speaker-8kznn\" (UID: \"8216ae30-2f9d-47c4-a748-3b0456ff9a27\") " pod="metallb-system/speaker-8kznn" Dec 13 03:24:49 crc kubenswrapper[5070]: I1213 03:24:49.891240 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7784b6fcf-jg4tq" Dec 13 03:24:50 crc kubenswrapper[5070]: I1213 03:24:50.262476 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/26e460ad-a509-4c45-a5a7-64cd87d2a5f0-metrics-certs\") pod \"frr-k8s-4s5j9\" (UID: \"26e460ad-a509-4c45-a5a7-64cd87d2a5f0\") " pod="metallb-system/frr-k8s-4s5j9" Dec 13 03:24:50 crc kubenswrapper[5070]: I1213 03:24:50.267255 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/26e460ad-a509-4c45-a5a7-64cd87d2a5f0-metrics-certs\") pod \"frr-k8s-4s5j9\" (UID: \"26e460ad-a509-4c45-a5a7-64cd87d2a5f0\") " pod="metallb-system/frr-k8s-4s5j9" Dec 13 03:24:50 crc kubenswrapper[5070]: I1213 03:24:50.363873 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6f946df5-dc5e-4b3a-afd7-51209e6dd09b-metrics-certs\") pod \"controller-5bddd4b946-wmrgw\" (UID: \"6f946df5-dc5e-4b3a-afd7-51209e6dd09b\") " pod="metallb-system/controller-5bddd4b946-wmrgw" Dec 13 03:24:50 crc kubenswrapper[5070]: I1213 03:24:50.363941 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/8216ae30-2f9d-47c4-a748-3b0456ff9a27-memberlist\") pod \"speaker-8kznn\" (UID: \"8216ae30-2f9d-47c4-a748-3b0456ff9a27\") " pod="metallb-system/speaker-8kznn" Dec 13 03:24:50 crc kubenswrapper[5070]: E1213 03:24:50.364016 5070 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Dec 13 03:24:50 crc kubenswrapper[5070]: E1213 03:24:50.364089 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/8216ae30-2f9d-47c4-a748-3b0456ff9a27-memberlist podName:8216ae30-2f9d-47c4-a748-3b0456ff9a27 nodeName:}" failed. No retries permitted until 2025-12-13 03:24:51.364075288 +0000 UTC m=+783.599918834 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/8216ae30-2f9d-47c4-a748-3b0456ff9a27-memberlist") pod "speaker-8kznn" (UID: "8216ae30-2f9d-47c4-a748-3b0456ff9a27") : secret "metallb-memberlist" not found Dec 13 03:24:50 crc kubenswrapper[5070]: I1213 03:24:50.368376 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6f946df5-dc5e-4b3a-afd7-51209e6dd09b-metrics-certs\") pod \"controller-5bddd4b946-wmrgw\" (UID: \"6f946df5-dc5e-4b3a-afd7-51209e6dd09b\") " pod="metallb-system/controller-5bddd4b946-wmrgw" Dec 13 03:24:50 crc kubenswrapper[5070]: I1213 03:24:50.510577 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-4s5j9" Dec 13 03:24:50 crc kubenswrapper[5070]: I1213 03:24:50.592182 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7784b6fcf-jg4tq"] Dec 13 03:24:50 crc kubenswrapper[5070]: W1213 03:24:50.598978 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod64134541_9479_4e28_9408_092333fa9e08.slice/crio-387d91571ca44635e0714f7d66f13ff9e268dc2240e5959c45a3a809cac8e572 WatchSource:0}: Error finding container 387d91571ca44635e0714f7d66f13ff9e268dc2240e5959c45a3a809cac8e572: Status 404 returned error can't find the container with id 387d91571ca44635e0714f7d66f13ff9e268dc2240e5959c45a3a809cac8e572 Dec 13 03:24:50 crc kubenswrapper[5070]: I1213 03:24:50.615761 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/controller-5bddd4b946-wmrgw" Dec 13 03:24:50 crc kubenswrapper[5070]: I1213 03:24:50.826025 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-5bddd4b946-wmrgw"] Dec 13 03:24:50 crc kubenswrapper[5070]: I1213 03:24:50.896223 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-5bddd4b946-wmrgw" event={"ID":"6f946df5-dc5e-4b3a-afd7-51209e6dd09b","Type":"ContainerStarted","Data":"20f384e08bc2113d304dd4972782ee48e5227ae79f8d865993a134efd1673a18"} Dec 13 03:24:50 crc kubenswrapper[5070]: I1213 03:24:50.898483 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7784b6fcf-jg4tq" event={"ID":"64134541-9479-4e28-9408-092333fa9e08","Type":"ContainerStarted","Data":"387d91571ca44635e0714f7d66f13ff9e268dc2240e5959c45a3a809cac8e572"} Dec 13 03:24:51 crc kubenswrapper[5070]: I1213 03:24:51.383620 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/8216ae30-2f9d-47c4-a748-3b0456ff9a27-memberlist\") pod \"speaker-8kznn\" (UID: \"8216ae30-2f9d-47c4-a748-3b0456ff9a27\") " pod="metallb-system/speaker-8kznn" Dec 13 03:24:51 crc kubenswrapper[5070]: I1213 03:24:51.391126 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/8216ae30-2f9d-47c4-a748-3b0456ff9a27-memberlist\") pod \"speaker-8kznn\" (UID: \"8216ae30-2f9d-47c4-a748-3b0456ff9a27\") " pod="metallb-system/speaker-8kznn" Dec 13 03:24:51 crc kubenswrapper[5070]: I1213 03:24:51.503721 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/speaker-8kznn" Dec 13 03:24:51 crc kubenswrapper[5070]: W1213 03:24:51.525854 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8216ae30_2f9d_47c4_a748_3b0456ff9a27.slice/crio-e51d124c6e8677c3d0eb1afb3f0fbde236a77c885dae8cf3bf5cbe57e0dd8aa0 WatchSource:0}: Error finding container e51d124c6e8677c3d0eb1afb3f0fbde236a77c885dae8cf3bf5cbe57e0dd8aa0: Status 404 returned error can't find the container with id e51d124c6e8677c3d0eb1afb3f0fbde236a77c885dae8cf3bf5cbe57e0dd8aa0 Dec 13 03:24:51 crc kubenswrapper[5070]: I1213 03:24:51.903821 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-4s5j9" event={"ID":"26e460ad-a509-4c45-a5a7-64cd87d2a5f0","Type":"ContainerStarted","Data":"ab8f737c95733cad7f2b649c71dfa2cf804625f76df0e4b23ef2917a6a26fbb5"} Dec 13 03:24:51 crc kubenswrapper[5070]: I1213 03:24:51.905812 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-5bddd4b946-wmrgw" event={"ID":"6f946df5-dc5e-4b3a-afd7-51209e6dd09b","Type":"ContainerStarted","Data":"cfc6d7c895d39d833f407729b5e7a44d9f584117a29b780ece670e53195f9b05"} Dec 13 03:24:51 crc kubenswrapper[5070]: I1213 03:24:51.905859 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-5bddd4b946-wmrgw" event={"ID":"6f946df5-dc5e-4b3a-afd7-51209e6dd09b","Type":"ContainerStarted","Data":"4b1ba730f508a6d340ad381862f4a15cc54943052eb75d24a9fbbe87ffbb25ed"} Dec 13 03:24:51 crc kubenswrapper[5070]: I1213 03:24:51.905945 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-5bddd4b946-wmrgw" Dec 13 03:24:51 crc kubenswrapper[5070]: I1213 03:24:51.907205 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-8kznn" event={"ID":"8216ae30-2f9d-47c4-a748-3b0456ff9a27","Type":"ContainerStarted","Data":"e51d124c6e8677c3d0eb1afb3f0fbde236a77c885dae8cf3bf5cbe57e0dd8aa0"} Dec 13 03:24:51 crc kubenswrapper[5070]: I1213 03:24:51.924229 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-5bddd4b946-wmrgw" podStartSLOduration=2.92421331 podStartE2EDuration="2.92421331s" podCreationTimestamp="2025-12-13 03:24:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 03:24:51.922499004 +0000 UTC m=+784.158342550" watchObservedRunningTime="2025-12-13 03:24:51.92421331 +0000 UTC m=+784.160056846" Dec 13 03:24:52 crc kubenswrapper[5070]: I1213 03:24:52.938920 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-8kznn" event={"ID":"8216ae30-2f9d-47c4-a748-3b0456ff9a27","Type":"ContainerStarted","Data":"7de84ab9f9e0ac0539eac298a2c215048b0d0a2e070174b2fca4b99463c40b6c"} Dec 13 03:24:53 crc kubenswrapper[5070]: I1213 03:24:53.960425 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-8kznn" event={"ID":"8216ae30-2f9d-47c4-a748-3b0456ff9a27","Type":"ContainerStarted","Data":"7e2e1aaa1bacec3629bc0b18faab1c1a2d3a70665328bd06355c178547288a9a"} Dec 13 03:24:53 crc kubenswrapper[5070]: I1213 03:24:53.960838 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-8kznn" Dec 13 03:24:53 crc kubenswrapper[5070]: I1213 03:24:53.984501 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-8kznn" 
podStartSLOduration=4.98448545 podStartE2EDuration="4.98448545s" podCreationTimestamp="2025-12-13 03:24:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 03:24:53.983505103 +0000 UTC m=+786.219348649" watchObservedRunningTime="2025-12-13 03:24:53.98448545 +0000 UTC m=+786.220328996" Dec 13 03:25:00 crc kubenswrapper[5070]: I1213 03:25:00.003127 5070 generic.go:334] "Generic (PLEG): container finished" podID="26e460ad-a509-4c45-a5a7-64cd87d2a5f0" containerID="da320fc2b5f3d5d52e448af03dca6f178447c92fa974a546b43271d73c68be7d" exitCode=0 Dec 13 03:25:00 crc kubenswrapper[5070]: I1213 03:25:00.003197 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-4s5j9" event={"ID":"26e460ad-a509-4c45-a5a7-64cd87d2a5f0","Type":"ContainerDied","Data":"da320fc2b5f3d5d52e448af03dca6f178447c92fa974a546b43271d73c68be7d"} Dec 13 03:25:00 crc kubenswrapper[5070]: I1213 03:25:00.005197 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7784b6fcf-jg4tq" event={"ID":"64134541-9479-4e28-9408-092333fa9e08","Type":"ContainerStarted","Data":"a4b8148981b945ffb25513d1058b668ff6a995eb0abf8dfdbc92816bc24e2005"} Dec 13 03:25:00 crc kubenswrapper[5070]: I1213 03:25:00.005360 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-webhook-server-7784b6fcf-jg4tq" Dec 13 03:25:00 crc kubenswrapper[5070]: I1213 03:25:00.044453 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-7784b6fcf-jg4tq" podStartSLOduration=1.938183056 podStartE2EDuration="11.044420474s" podCreationTimestamp="2025-12-13 03:24:49 +0000 UTC" firstStartedPulling="2025-12-13 03:24:50.602052011 +0000 UTC m=+782.837895557" lastFinishedPulling="2025-12-13 03:24:59.708289429 +0000 UTC m=+791.944132975" observedRunningTime="2025-12-13 03:25:00.043822948 +0000 UTC m=+792.279666514" watchObservedRunningTime="2025-12-13 03:25:00.044420474 +0000 UTC m=+792.280264020" Dec 13 03:25:01 crc kubenswrapper[5070]: I1213 03:25:01.013598 5070 generic.go:334] "Generic (PLEG): container finished" podID="26e460ad-a509-4c45-a5a7-64cd87d2a5f0" containerID="f1d9bdea19b79f73c16cf0eff1c103aed70858c71fcb9826831060373fe54242" exitCode=0 Dec 13 03:25:01 crc kubenswrapper[5070]: I1213 03:25:01.013675 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-4s5j9" event={"ID":"26e460ad-a509-4c45-a5a7-64cd87d2a5f0","Type":"ContainerDied","Data":"f1d9bdea19b79f73c16cf0eff1c103aed70858c71fcb9826831060373fe54242"} Dec 13 03:25:02 crc kubenswrapper[5070]: I1213 03:25:02.021713 5070 generic.go:334] "Generic (PLEG): container finished" podID="26e460ad-a509-4c45-a5a7-64cd87d2a5f0" containerID="46cffdce483549a4bbff556f02aa56817d8ae599f947d54b416d6763f773c2cc" exitCode=0 Dec 13 03:25:02 crc kubenswrapper[5070]: I1213 03:25:02.021770 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-4s5j9" event={"ID":"26e460ad-a509-4c45-a5a7-64cd87d2a5f0","Type":"ContainerDied","Data":"46cffdce483549a4bbff556f02aa56817d8ae599f947d54b416d6763f773c2cc"} Dec 13 03:25:03 crc kubenswrapper[5070]: I1213 03:25:03.032768 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-4s5j9" event={"ID":"26e460ad-a509-4c45-a5a7-64cd87d2a5f0","Type":"ContainerStarted","Data":"e0f3c1848af37d04c90cbbc098ad573c50f7dbdb5157f9d55d2a41131f1bc49b"} Dec 13 03:25:03 crc 
kubenswrapper[5070]: I1213 03:25:03.032817 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-4s5j9" event={"ID":"26e460ad-a509-4c45-a5a7-64cd87d2a5f0","Type":"ContainerStarted","Data":"f31ae50d8253fd8df1268a731e929f8db0652c89aca91c5b16a029cf4b703c9f"} Dec 13 03:25:03 crc kubenswrapper[5070]: I1213 03:25:03.032829 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-4s5j9" event={"ID":"26e460ad-a509-4c45-a5a7-64cd87d2a5f0","Type":"ContainerStarted","Data":"7baa2790303839c8263969533c73530a224c7123bc4f878e05d0a168795fb483"} Dec 13 03:25:03 crc kubenswrapper[5070]: I1213 03:25:03.032840 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-4s5j9" event={"ID":"26e460ad-a509-4c45-a5a7-64cd87d2a5f0","Type":"ContainerStarted","Data":"dd2dffc898ddb518433bccc4cfd4eaf4af95995faf5bf46fb6aed73c58536c92"} Dec 13 03:25:03 crc kubenswrapper[5070]: I1213 03:25:03.032851 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-4s5j9" event={"ID":"26e460ad-a509-4c45-a5a7-64cd87d2a5f0","Type":"ContainerStarted","Data":"8822e89250750543a138dcc4dda27447c128d0b7268c729656ca9d0a354d5dc1"} Dec 13 03:25:03 crc kubenswrapper[5070]: I1213 03:25:03.032865 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-4s5j9" event={"ID":"26e460ad-a509-4c45-a5a7-64cd87d2a5f0","Type":"ContainerStarted","Data":"bb702be7781fcab6c93904656190628dfb41e45d5610018029578aba2b47493c"} Dec 13 03:25:03 crc kubenswrapper[5070]: I1213 03:25:03.033950 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-4s5j9" Dec 13 03:25:03 crc kubenswrapper[5070]: I1213 03:25:03.065380 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-4s5j9" podStartSLOduration=5.28200523 podStartE2EDuration="14.065363314s" podCreationTimestamp="2025-12-13 03:24:49 +0000 UTC" firstStartedPulling="2025-12-13 03:24:50.939593285 +0000 UTC m=+783.175436831" lastFinishedPulling="2025-12-13 03:24:59.722951369 +0000 UTC m=+791.958794915" observedRunningTime="2025-12-13 03:25:03.062215458 +0000 UTC m=+795.298059034" watchObservedRunningTime="2025-12-13 03:25:03.065363314 +0000 UTC m=+795.301206870" Dec 13 03:25:05 crc kubenswrapper[5070]: I1213 03:25:05.511527 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-4s5j9" Dec 13 03:25:05 crc kubenswrapper[5070]: I1213 03:25:05.563066 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-4s5j9" Dec 13 03:25:07 crc kubenswrapper[5070]: I1213 03:25:07.197853 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-s5mb8"] Dec 13 03:25:07 crc kubenswrapper[5070]: I1213 03:25:07.199864 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-s5mb8" Dec 13 03:25:07 crc kubenswrapper[5070]: I1213 03:25:07.204791 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-s5mb8"] Dec 13 03:25:07 crc kubenswrapper[5070]: I1213 03:25:07.226462 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2cf8af26-4c65-4575-8673-bdf872c641c4-utilities\") pod \"certified-operators-s5mb8\" (UID: \"2cf8af26-4c65-4575-8673-bdf872c641c4\") " pod="openshift-marketplace/certified-operators-s5mb8" Dec 13 03:25:07 crc kubenswrapper[5070]: I1213 03:25:07.226527 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2cf8af26-4c65-4575-8673-bdf872c641c4-catalog-content\") pod \"certified-operators-s5mb8\" (UID: \"2cf8af26-4c65-4575-8673-bdf872c641c4\") " pod="openshift-marketplace/certified-operators-s5mb8" Dec 13 03:25:07 crc kubenswrapper[5070]: I1213 03:25:07.226671 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2cpmz\" (UniqueName: \"kubernetes.io/projected/2cf8af26-4c65-4575-8673-bdf872c641c4-kube-api-access-2cpmz\") pod \"certified-operators-s5mb8\" (UID: \"2cf8af26-4c65-4575-8673-bdf872c641c4\") " pod="openshift-marketplace/certified-operators-s5mb8" Dec 13 03:25:07 crc kubenswrapper[5070]: I1213 03:25:07.327588 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2cpmz\" (UniqueName: \"kubernetes.io/projected/2cf8af26-4c65-4575-8673-bdf872c641c4-kube-api-access-2cpmz\") pod \"certified-operators-s5mb8\" (UID: \"2cf8af26-4c65-4575-8673-bdf872c641c4\") " pod="openshift-marketplace/certified-operators-s5mb8" Dec 13 03:25:07 crc kubenswrapper[5070]: I1213 03:25:07.327934 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2cf8af26-4c65-4575-8673-bdf872c641c4-utilities\") pod \"certified-operators-s5mb8\" (UID: \"2cf8af26-4c65-4575-8673-bdf872c641c4\") " pod="openshift-marketplace/certified-operators-s5mb8" Dec 13 03:25:07 crc kubenswrapper[5070]: I1213 03:25:07.327963 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2cf8af26-4c65-4575-8673-bdf872c641c4-catalog-content\") pod \"certified-operators-s5mb8\" (UID: \"2cf8af26-4c65-4575-8673-bdf872c641c4\") " pod="openshift-marketplace/certified-operators-s5mb8" Dec 13 03:25:07 crc kubenswrapper[5070]: I1213 03:25:07.328496 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2cf8af26-4c65-4575-8673-bdf872c641c4-catalog-content\") pod \"certified-operators-s5mb8\" (UID: \"2cf8af26-4c65-4575-8673-bdf872c641c4\") " pod="openshift-marketplace/certified-operators-s5mb8" Dec 13 03:25:07 crc kubenswrapper[5070]: I1213 03:25:07.328517 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2cf8af26-4c65-4575-8673-bdf872c641c4-utilities\") pod \"certified-operators-s5mb8\" (UID: \"2cf8af26-4c65-4575-8673-bdf872c641c4\") " pod="openshift-marketplace/certified-operators-s5mb8" Dec 13 03:25:07 crc kubenswrapper[5070]: I1213 03:25:07.346231 5070 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-2cpmz\" (UniqueName: \"kubernetes.io/projected/2cf8af26-4c65-4575-8673-bdf872c641c4-kube-api-access-2cpmz\") pod \"certified-operators-s5mb8\" (UID: \"2cf8af26-4c65-4575-8673-bdf872c641c4\") " pod="openshift-marketplace/certified-operators-s5mb8" Dec 13 03:25:07 crc kubenswrapper[5070]: I1213 03:25:07.528334 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-s5mb8" Dec 13 03:25:07 crc kubenswrapper[5070]: I1213 03:25:07.988318 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-s5mb8"] Dec 13 03:25:07 crc kubenswrapper[5070]: W1213 03:25:07.995572 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2cf8af26_4c65_4575_8673_bdf872c641c4.slice/crio-19071085e84f9285e1b501eccbd2063435534e37757b6478d2e359e744b65e0b WatchSource:0}: Error finding container 19071085e84f9285e1b501eccbd2063435534e37757b6478d2e359e744b65e0b: Status 404 returned error can't find the container with id 19071085e84f9285e1b501eccbd2063435534e37757b6478d2e359e744b65e0b Dec 13 03:25:08 crc kubenswrapper[5070]: I1213 03:25:08.063948 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-s5mb8" event={"ID":"2cf8af26-4c65-4575-8673-bdf872c641c4","Type":"ContainerStarted","Data":"19071085e84f9285e1b501eccbd2063435534e37757b6478d2e359e744b65e0b"} Dec 13 03:25:09 crc kubenswrapper[5070]: I1213 03:25:09.071794 5070 generic.go:334] "Generic (PLEG): container finished" podID="2cf8af26-4c65-4575-8673-bdf872c641c4" containerID="662ceee9e1030fc7c455da85a14f58b0fe58fe73d5df99a46371a4bb185bd008" exitCode=0 Dec 13 03:25:09 crc kubenswrapper[5070]: I1213 03:25:09.071849 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-s5mb8" event={"ID":"2cf8af26-4c65-4575-8673-bdf872c641c4","Type":"ContainerDied","Data":"662ceee9e1030fc7c455da85a14f58b0fe58fe73d5df99a46371a4bb185bd008"} Dec 13 03:25:09 crc kubenswrapper[5070]: I1213 03:25:09.899745 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-7784b6fcf-jg4tq" Dec 13 03:25:10 crc kubenswrapper[5070]: I1213 03:25:10.080835 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-s5mb8" event={"ID":"2cf8af26-4c65-4575-8673-bdf872c641c4","Type":"ContainerStarted","Data":"a446a791d9276bac02108547e1c50e5228dbd56b20eb26845ec06eabc078b181"} Dec 13 03:25:10 crc kubenswrapper[5070]: I1213 03:25:10.622810 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-5bddd4b946-wmrgw" Dec 13 03:25:11 crc kubenswrapper[5070]: I1213 03:25:11.507047 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-8kznn" Dec 13 03:25:12 crc kubenswrapper[5070]: I1213 03:25:12.136233 5070 generic.go:334] "Generic (PLEG): container finished" podID="2cf8af26-4c65-4575-8673-bdf872c641c4" containerID="a446a791d9276bac02108547e1c50e5228dbd56b20eb26845ec06eabc078b181" exitCode=0 Dec 13 03:25:12 crc kubenswrapper[5070]: I1213 03:25:12.136291 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-s5mb8" 
event={"ID":"2cf8af26-4c65-4575-8673-bdf872c641c4","Type":"ContainerDied","Data":"a446a791d9276bac02108547e1c50e5228dbd56b20eb26845ec06eabc078b181"} Dec 13 03:25:13 crc kubenswrapper[5070]: I1213 03:25:13.160931 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-s5mb8" event={"ID":"2cf8af26-4c65-4575-8673-bdf872c641c4","Type":"ContainerStarted","Data":"4534df822c8f8e6712ed8d5941b2c6444b845fdfc2a9a1c56484af933c128c33"} Dec 13 03:25:13 crc kubenswrapper[5070]: I1213 03:25:13.183027 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-s5mb8" podStartSLOduration=2.360113936 podStartE2EDuration="6.183008843s" podCreationTimestamp="2025-12-13 03:25:07 +0000 UTC" firstStartedPulling="2025-12-13 03:25:09.07627851 +0000 UTC m=+801.312122056" lastFinishedPulling="2025-12-13 03:25:12.899173417 +0000 UTC m=+805.135016963" observedRunningTime="2025-12-13 03:25:13.182829938 +0000 UTC m=+805.418673504" watchObservedRunningTime="2025-12-13 03:25:13.183008843 +0000 UTC m=+805.418852389" Dec 13 03:25:14 crc kubenswrapper[5070]: I1213 03:25:14.553004 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-cdbc4"] Dec 13 03:25:14 crc kubenswrapper[5070]: I1213 03:25:14.553981 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-cdbc4" Dec 13 03:25:14 crc kubenswrapper[5070]: I1213 03:25:14.556017 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-index-dockercfg-klwp5" Dec 13 03:25:14 crc kubenswrapper[5070]: I1213 03:25:14.556404 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt" Dec 13 03:25:14 crc kubenswrapper[5070]: I1213 03:25:14.556735 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt" Dec 13 03:25:14 crc kubenswrapper[5070]: I1213 03:25:14.566166 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-cdbc4"] Dec 13 03:25:14 crc kubenswrapper[5070]: I1213 03:25:14.598512 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jtwcm\" (UniqueName: \"kubernetes.io/projected/d57a056b-59d7-4a1e-ba1e-1102683c6118-kube-api-access-jtwcm\") pod \"openstack-operator-index-cdbc4\" (UID: \"d57a056b-59d7-4a1e-ba1e-1102683c6118\") " pod="openstack-operators/openstack-operator-index-cdbc4" Dec 13 03:25:14 crc kubenswrapper[5070]: I1213 03:25:14.700677 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jtwcm\" (UniqueName: \"kubernetes.io/projected/d57a056b-59d7-4a1e-ba1e-1102683c6118-kube-api-access-jtwcm\") pod \"openstack-operator-index-cdbc4\" (UID: \"d57a056b-59d7-4a1e-ba1e-1102683c6118\") " pod="openstack-operators/openstack-operator-index-cdbc4" Dec 13 03:25:14 crc kubenswrapper[5070]: I1213 03:25:14.732398 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jtwcm\" (UniqueName: \"kubernetes.io/projected/d57a056b-59d7-4a1e-ba1e-1102683c6118-kube-api-access-jtwcm\") pod \"openstack-operator-index-cdbc4\" (UID: \"d57a056b-59d7-4a1e-ba1e-1102683c6118\") " pod="openstack-operators/openstack-operator-index-cdbc4" Dec 13 03:25:14 crc kubenswrapper[5070]: I1213 03:25:14.880559 5070 util.go:30] "No sandbox 
for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-cdbc4" Dec 13 03:25:15 crc kubenswrapper[5070]: I1213 03:25:15.329843 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-cdbc4"] Dec 13 03:25:16 crc kubenswrapper[5070]: I1213 03:25:16.178290 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-cdbc4" event={"ID":"d57a056b-59d7-4a1e-ba1e-1102683c6118","Type":"ContainerStarted","Data":"6c1b33661a731dbe82390202dde9fd501fc116ee7352618fc0d7c7691b18a5e8"} Dec 13 03:25:17 crc kubenswrapper[5070]: I1213 03:25:17.529404 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-s5mb8" Dec 13 03:25:17 crc kubenswrapper[5070]: I1213 03:25:17.529507 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-s5mb8" Dec 13 03:25:17 crc kubenswrapper[5070]: I1213 03:25:17.573680 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-s5mb8" Dec 13 03:25:18 crc kubenswrapper[5070]: I1213 03:25:18.236181 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-s5mb8" Dec 13 03:25:20 crc kubenswrapper[5070]: I1213 03:25:20.525014 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-4s5j9" Dec 13 03:25:21 crc kubenswrapper[5070]: I1213 03:25:21.943330 5070 patch_prober.go:28] interesting pod/machine-config-daemon-9l4rb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 13 03:25:21 crc kubenswrapper[5070]: I1213 03:25:21.943405 5070 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 13 03:25:21 crc kubenswrapper[5070]: I1213 03:25:21.971514 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-s5mb8"] Dec 13 03:25:21 crc kubenswrapper[5070]: I1213 03:25:21.971753 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-s5mb8" podUID="2cf8af26-4c65-4575-8673-bdf872c641c4" containerName="registry-server" containerID="cri-o://4534df822c8f8e6712ed8d5941b2c6444b845fdfc2a9a1c56484af933c128c33" gracePeriod=2 Dec 13 03:25:22 crc kubenswrapper[5070]: I1213 03:25:22.239109 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-cdbc4" event={"ID":"d57a056b-59d7-4a1e-ba1e-1102683c6118","Type":"ContainerStarted","Data":"6053e2e6fc348552ab13fa3b373211975078586c6c7fdc6ee5d4f44b4d296270"} Dec 13 03:25:22 crc kubenswrapper[5070]: I1213 03:25:22.257537 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-cdbc4" podStartSLOduration=1.736742906 podStartE2EDuration="8.257520104s" podCreationTimestamp="2025-12-13 03:25:14 +0000 UTC" firstStartedPulling="2025-12-13 03:25:15.338329839 +0000 UTC m=+807.574173385" 
lastFinishedPulling="2025-12-13 03:25:21.859107027 +0000 UTC m=+814.094950583" observedRunningTime="2025-12-13 03:25:22.256017263 +0000 UTC m=+814.491860809" watchObservedRunningTime="2025-12-13 03:25:22.257520104 +0000 UTC m=+814.493363650" Dec 13 03:25:22 crc kubenswrapper[5070]: I1213 03:25:22.259103 5070 generic.go:334] "Generic (PLEG): container finished" podID="2cf8af26-4c65-4575-8673-bdf872c641c4" containerID="4534df822c8f8e6712ed8d5941b2c6444b845fdfc2a9a1c56484af933c128c33" exitCode=0 Dec 13 03:25:22 crc kubenswrapper[5070]: I1213 03:25:22.259145 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-s5mb8" event={"ID":"2cf8af26-4c65-4575-8673-bdf872c641c4","Type":"ContainerDied","Data":"4534df822c8f8e6712ed8d5941b2c6444b845fdfc2a9a1c56484af933c128c33"} Dec 13 03:25:22 crc kubenswrapper[5070]: I1213 03:25:22.341845 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-s5mb8" Dec 13 03:25:22 crc kubenswrapper[5070]: I1213 03:25:22.428519 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2cpmz\" (UniqueName: \"kubernetes.io/projected/2cf8af26-4c65-4575-8673-bdf872c641c4-kube-api-access-2cpmz\") pod \"2cf8af26-4c65-4575-8673-bdf872c641c4\" (UID: \"2cf8af26-4c65-4575-8673-bdf872c641c4\") " Dec 13 03:25:22 crc kubenswrapper[5070]: I1213 03:25:22.428618 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2cf8af26-4c65-4575-8673-bdf872c641c4-utilities\") pod \"2cf8af26-4c65-4575-8673-bdf872c641c4\" (UID: \"2cf8af26-4c65-4575-8673-bdf872c641c4\") " Dec 13 03:25:22 crc kubenswrapper[5070]: I1213 03:25:22.428711 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2cf8af26-4c65-4575-8673-bdf872c641c4-catalog-content\") pod \"2cf8af26-4c65-4575-8673-bdf872c641c4\" (UID: \"2cf8af26-4c65-4575-8673-bdf872c641c4\") " Dec 13 03:25:22 crc kubenswrapper[5070]: I1213 03:25:22.429992 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2cf8af26-4c65-4575-8673-bdf872c641c4-utilities" (OuterVolumeSpecName: "utilities") pod "2cf8af26-4c65-4575-8673-bdf872c641c4" (UID: "2cf8af26-4c65-4575-8673-bdf872c641c4"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 03:25:22 crc kubenswrapper[5070]: I1213 03:25:22.434813 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2cf8af26-4c65-4575-8673-bdf872c641c4-kube-api-access-2cpmz" (OuterVolumeSpecName: "kube-api-access-2cpmz") pod "2cf8af26-4c65-4575-8673-bdf872c641c4" (UID: "2cf8af26-4c65-4575-8673-bdf872c641c4"). InnerVolumeSpecName "kube-api-access-2cpmz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:25:22 crc kubenswrapper[5070]: I1213 03:25:22.479309 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2cf8af26-4c65-4575-8673-bdf872c641c4-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2cf8af26-4c65-4575-8673-bdf872c641c4" (UID: "2cf8af26-4c65-4575-8673-bdf872c641c4"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 03:25:22 crc kubenswrapper[5070]: I1213 03:25:22.530630 5070 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2cf8af26-4c65-4575-8673-bdf872c641c4-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 13 03:25:22 crc kubenswrapper[5070]: I1213 03:25:22.530673 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2cpmz\" (UniqueName: \"kubernetes.io/projected/2cf8af26-4c65-4575-8673-bdf872c641c4-kube-api-access-2cpmz\") on node \"crc\" DevicePath \"\"" Dec 13 03:25:22 crc kubenswrapper[5070]: I1213 03:25:22.530688 5070 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2cf8af26-4c65-4575-8673-bdf872c641c4-utilities\") on node \"crc\" DevicePath \"\"" Dec 13 03:25:23 crc kubenswrapper[5070]: I1213 03:25:23.267805 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-s5mb8" event={"ID":"2cf8af26-4c65-4575-8673-bdf872c641c4","Type":"ContainerDied","Data":"19071085e84f9285e1b501eccbd2063435534e37757b6478d2e359e744b65e0b"} Dec 13 03:25:23 crc kubenswrapper[5070]: I1213 03:25:23.267855 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-s5mb8" Dec 13 03:25:23 crc kubenswrapper[5070]: I1213 03:25:23.267872 5070 scope.go:117] "RemoveContainer" containerID="4534df822c8f8e6712ed8d5941b2c6444b845fdfc2a9a1c56484af933c128c33" Dec 13 03:25:23 crc kubenswrapper[5070]: I1213 03:25:23.290027 5070 scope.go:117] "RemoveContainer" containerID="a446a791d9276bac02108547e1c50e5228dbd56b20eb26845ec06eabc078b181" Dec 13 03:25:23 crc kubenswrapper[5070]: I1213 03:25:23.302222 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-s5mb8"] Dec 13 03:25:23 crc kubenswrapper[5070]: I1213 03:25:23.308540 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-s5mb8"] Dec 13 03:25:23 crc kubenswrapper[5070]: I1213 03:25:23.319254 5070 scope.go:117] "RemoveContainer" containerID="662ceee9e1030fc7c455da85a14f58b0fe58fe73d5df99a46371a4bb185bd008" Dec 13 03:25:24 crc kubenswrapper[5070]: I1213 03:25:24.174011 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2cf8af26-4c65-4575-8673-bdf872c641c4" path="/var/lib/kubelet/pods/2cf8af26-4c65-4575-8673-bdf872c641c4/volumes" Dec 13 03:25:24 crc kubenswrapper[5070]: I1213 03:25:24.881241 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-index-cdbc4" Dec 13 03:25:24 crc kubenswrapper[5070]: I1213 03:25:24.881472 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/openstack-operator-index-cdbc4" Dec 13 03:25:24 crc kubenswrapper[5070]: I1213 03:25:24.910894 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/openstack-operator-index-cdbc4" Dec 13 03:25:34 crc kubenswrapper[5070]: I1213 03:25:34.918648 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-index-cdbc4" Dec 13 03:25:40 crc kubenswrapper[5070]: I1213 03:25:40.762117 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-jcl86"] Dec 13 03:25:40 crc kubenswrapper[5070]: E1213 03:25:40.762995 5070 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="2cf8af26-4c65-4575-8673-bdf872c641c4" containerName="extract-content" Dec 13 03:25:40 crc kubenswrapper[5070]: I1213 03:25:40.763014 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="2cf8af26-4c65-4575-8673-bdf872c641c4" containerName="extract-content" Dec 13 03:25:40 crc kubenswrapper[5070]: E1213 03:25:40.763030 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2cf8af26-4c65-4575-8673-bdf872c641c4" containerName="extract-utilities" Dec 13 03:25:40 crc kubenswrapper[5070]: I1213 03:25:40.763038 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="2cf8af26-4c65-4575-8673-bdf872c641c4" containerName="extract-utilities" Dec 13 03:25:40 crc kubenswrapper[5070]: E1213 03:25:40.763060 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2cf8af26-4c65-4575-8673-bdf872c641c4" containerName="registry-server" Dec 13 03:25:40 crc kubenswrapper[5070]: I1213 03:25:40.763069 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="2cf8af26-4c65-4575-8673-bdf872c641c4" containerName="registry-server" Dec 13 03:25:40 crc kubenswrapper[5070]: I1213 03:25:40.763220 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="2cf8af26-4c65-4575-8673-bdf872c641c4" containerName="registry-server" Dec 13 03:25:40 crc kubenswrapper[5070]: I1213 03:25:40.803131 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-jcl86"] Dec 13 03:25:40 crc kubenswrapper[5070]: I1213 03:25:40.803302 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-jcl86" Dec 13 03:25:40 crc kubenswrapper[5070]: I1213 03:25:40.918057 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2cb210d0-15da-410d-a470-e0969f46edf3-utilities\") pod \"community-operators-jcl86\" (UID: \"2cb210d0-15da-410d-a470-e0969f46edf3\") " pod="openshift-marketplace/community-operators-jcl86" Dec 13 03:25:40 crc kubenswrapper[5070]: I1213 03:25:40.918125 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2cb210d0-15da-410d-a470-e0969f46edf3-catalog-content\") pod \"community-operators-jcl86\" (UID: \"2cb210d0-15da-410d-a470-e0969f46edf3\") " pod="openshift-marketplace/community-operators-jcl86" Dec 13 03:25:40 crc kubenswrapper[5070]: I1213 03:25:40.918164 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4ds6j\" (UniqueName: \"kubernetes.io/projected/2cb210d0-15da-410d-a470-e0969f46edf3-kube-api-access-4ds6j\") pod \"community-operators-jcl86\" (UID: \"2cb210d0-15da-410d-a470-e0969f46edf3\") " pod="openshift-marketplace/community-operators-jcl86" Dec 13 03:25:41 crc kubenswrapper[5070]: I1213 03:25:41.019831 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2cb210d0-15da-410d-a470-e0969f46edf3-utilities\") pod \"community-operators-jcl86\" (UID: \"2cb210d0-15da-410d-a470-e0969f46edf3\") " pod="openshift-marketplace/community-operators-jcl86" Dec 13 03:25:41 crc kubenswrapper[5070]: I1213 03:25:41.019883 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/2cb210d0-15da-410d-a470-e0969f46edf3-catalog-content\") pod \"community-operators-jcl86\" (UID: \"2cb210d0-15da-410d-a470-e0969f46edf3\") " pod="openshift-marketplace/community-operators-jcl86" Dec 13 03:25:41 crc kubenswrapper[5070]: I1213 03:25:41.019916 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4ds6j\" (UniqueName: \"kubernetes.io/projected/2cb210d0-15da-410d-a470-e0969f46edf3-kube-api-access-4ds6j\") pod \"community-operators-jcl86\" (UID: \"2cb210d0-15da-410d-a470-e0969f46edf3\") " pod="openshift-marketplace/community-operators-jcl86" Dec 13 03:25:41 crc kubenswrapper[5070]: I1213 03:25:41.020380 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2cb210d0-15da-410d-a470-e0969f46edf3-utilities\") pod \"community-operators-jcl86\" (UID: \"2cb210d0-15da-410d-a470-e0969f46edf3\") " pod="openshift-marketplace/community-operators-jcl86" Dec 13 03:25:41 crc kubenswrapper[5070]: I1213 03:25:41.020429 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2cb210d0-15da-410d-a470-e0969f46edf3-catalog-content\") pod \"community-operators-jcl86\" (UID: \"2cb210d0-15da-410d-a470-e0969f46edf3\") " pod="openshift-marketplace/community-operators-jcl86" Dec 13 03:25:41 crc kubenswrapper[5070]: I1213 03:25:41.038827 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4ds6j\" (UniqueName: \"kubernetes.io/projected/2cb210d0-15da-410d-a470-e0969f46edf3-kube-api-access-4ds6j\") pod \"community-operators-jcl86\" (UID: \"2cb210d0-15da-410d-a470-e0969f46edf3\") " pod="openshift-marketplace/community-operators-jcl86" Dec 13 03:25:41 crc kubenswrapper[5070]: I1213 03:25:41.130846 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-jcl86" Dec 13 03:25:41 crc kubenswrapper[5070]: I1213 03:25:41.594151 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-jcl86"] Dec 13 03:25:42 crc kubenswrapper[5070]: I1213 03:25:42.386537 5070 generic.go:334] "Generic (PLEG): container finished" podID="2cb210d0-15da-410d-a470-e0969f46edf3" containerID="5feaf79b938cedc6af97bdcb25cf7ee08b06d63bc5ac69d46b810dae9ac8bcdb" exitCode=0 Dec 13 03:25:42 crc kubenswrapper[5070]: I1213 03:25:42.386600 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jcl86" event={"ID":"2cb210d0-15da-410d-a470-e0969f46edf3","Type":"ContainerDied","Data":"5feaf79b938cedc6af97bdcb25cf7ee08b06d63bc5ac69d46b810dae9ac8bcdb"} Dec 13 03:25:42 crc kubenswrapper[5070]: I1213 03:25:42.386628 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jcl86" event={"ID":"2cb210d0-15da-410d-a470-e0969f46edf3","Type":"ContainerStarted","Data":"ba8a54d8bc3ab01e93dc0841e8b88e36fe9f4513a0bea4a14eaf34d5f31ed025"} Dec 13 03:25:43 crc kubenswrapper[5070]: I1213 03:25:43.397045 5070 generic.go:334] "Generic (PLEG): container finished" podID="2cb210d0-15da-410d-a470-e0969f46edf3" containerID="58d2c1d4163dbb5661f303706f44dc100f784d070f2f8ba5b7a37c03d35f87e0" exitCode=0 Dec 13 03:25:43 crc kubenswrapper[5070]: I1213 03:25:43.397150 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jcl86" event={"ID":"2cb210d0-15da-410d-a470-e0969f46edf3","Type":"ContainerDied","Data":"58d2c1d4163dbb5661f303706f44dc100f784d070f2f8ba5b7a37c03d35f87e0"} Dec 13 03:25:44 crc kubenswrapper[5070]: I1213 03:25:44.589165 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/1c2b05ec4440d1c803ed4d9fb5e82bd8db8287830a02ba7dc105a163cc24n5q"] Dec 13 03:25:44 crc kubenswrapper[5070]: I1213 03:25:44.590587 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/1c2b05ec4440d1c803ed4d9fb5e82bd8db8287830a02ba7dc105a163cc24n5q" Dec 13 03:25:44 crc kubenswrapper[5070]: I1213 03:25:44.593109 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-s4zr4" Dec 13 03:25:44 crc kubenswrapper[5070]: I1213 03:25:44.601833 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/1c2b05ec4440d1c803ed4d9fb5e82bd8db8287830a02ba7dc105a163cc24n5q"] Dec 13 03:25:44 crc kubenswrapper[5070]: I1213 03:25:44.767688 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/c03750de-0097-4810-a030-d5a3f98f68fe-util\") pod \"1c2b05ec4440d1c803ed4d9fb5e82bd8db8287830a02ba7dc105a163cc24n5q\" (UID: \"c03750de-0097-4810-a030-d5a3f98f68fe\") " pod="openstack-operators/1c2b05ec4440d1c803ed4d9fb5e82bd8db8287830a02ba7dc105a163cc24n5q" Dec 13 03:25:44 crc kubenswrapper[5070]: I1213 03:25:44.767825 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wnjbj\" (UniqueName: \"kubernetes.io/projected/c03750de-0097-4810-a030-d5a3f98f68fe-kube-api-access-wnjbj\") pod \"1c2b05ec4440d1c803ed4d9fb5e82bd8db8287830a02ba7dc105a163cc24n5q\" (UID: \"c03750de-0097-4810-a030-d5a3f98f68fe\") " pod="openstack-operators/1c2b05ec4440d1c803ed4d9fb5e82bd8db8287830a02ba7dc105a163cc24n5q" Dec 13 03:25:44 crc kubenswrapper[5070]: I1213 03:25:44.767878 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/c03750de-0097-4810-a030-d5a3f98f68fe-bundle\") pod \"1c2b05ec4440d1c803ed4d9fb5e82bd8db8287830a02ba7dc105a163cc24n5q\" (UID: \"c03750de-0097-4810-a030-d5a3f98f68fe\") " pod="openstack-operators/1c2b05ec4440d1c803ed4d9fb5e82bd8db8287830a02ba7dc105a163cc24n5q" Dec 13 03:25:44 crc kubenswrapper[5070]: I1213 03:25:44.868873 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wnjbj\" (UniqueName: \"kubernetes.io/projected/c03750de-0097-4810-a030-d5a3f98f68fe-kube-api-access-wnjbj\") pod \"1c2b05ec4440d1c803ed4d9fb5e82bd8db8287830a02ba7dc105a163cc24n5q\" (UID: \"c03750de-0097-4810-a030-d5a3f98f68fe\") " pod="openstack-operators/1c2b05ec4440d1c803ed4d9fb5e82bd8db8287830a02ba7dc105a163cc24n5q" Dec 13 03:25:44 crc kubenswrapper[5070]: I1213 03:25:44.868935 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/c03750de-0097-4810-a030-d5a3f98f68fe-bundle\") pod \"1c2b05ec4440d1c803ed4d9fb5e82bd8db8287830a02ba7dc105a163cc24n5q\" (UID: \"c03750de-0097-4810-a030-d5a3f98f68fe\") " pod="openstack-operators/1c2b05ec4440d1c803ed4d9fb5e82bd8db8287830a02ba7dc105a163cc24n5q" Dec 13 03:25:44 crc kubenswrapper[5070]: I1213 03:25:44.869022 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/c03750de-0097-4810-a030-d5a3f98f68fe-util\") pod \"1c2b05ec4440d1c803ed4d9fb5e82bd8db8287830a02ba7dc105a163cc24n5q\" (UID: \"c03750de-0097-4810-a030-d5a3f98f68fe\") " pod="openstack-operators/1c2b05ec4440d1c803ed4d9fb5e82bd8db8287830a02ba7dc105a163cc24n5q" Dec 13 03:25:44 crc kubenswrapper[5070]: I1213 03:25:44.869563 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/c03750de-0097-4810-a030-d5a3f98f68fe-util\") pod \"1c2b05ec4440d1c803ed4d9fb5e82bd8db8287830a02ba7dc105a163cc24n5q\" (UID: \"c03750de-0097-4810-a030-d5a3f98f68fe\") " pod="openstack-operators/1c2b05ec4440d1c803ed4d9fb5e82bd8db8287830a02ba7dc105a163cc24n5q" Dec 13 03:25:44 crc kubenswrapper[5070]: I1213 03:25:44.869878 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/c03750de-0097-4810-a030-d5a3f98f68fe-bundle\") pod \"1c2b05ec4440d1c803ed4d9fb5e82bd8db8287830a02ba7dc105a163cc24n5q\" (UID: \"c03750de-0097-4810-a030-d5a3f98f68fe\") " pod="openstack-operators/1c2b05ec4440d1c803ed4d9fb5e82bd8db8287830a02ba7dc105a163cc24n5q" Dec 13 03:25:44 crc kubenswrapper[5070]: I1213 03:25:44.895647 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wnjbj\" (UniqueName: \"kubernetes.io/projected/c03750de-0097-4810-a030-d5a3f98f68fe-kube-api-access-wnjbj\") pod \"1c2b05ec4440d1c803ed4d9fb5e82bd8db8287830a02ba7dc105a163cc24n5q\" (UID: \"c03750de-0097-4810-a030-d5a3f98f68fe\") " pod="openstack-operators/1c2b05ec4440d1c803ed4d9fb5e82bd8db8287830a02ba7dc105a163cc24n5q" Dec 13 03:25:44 crc kubenswrapper[5070]: I1213 03:25:44.919850 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/1c2b05ec4440d1c803ed4d9fb5e82bd8db8287830a02ba7dc105a163cc24n5q" Dec 13 03:25:45 crc kubenswrapper[5070]: I1213 03:25:45.214924 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/1c2b05ec4440d1c803ed4d9fb5e82bd8db8287830a02ba7dc105a163cc24n5q"] Dec 13 03:25:45 crc kubenswrapper[5070]: W1213 03:25:45.220931 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc03750de_0097_4810_a030_d5a3f98f68fe.slice/crio-76bc75a1b33da926e3c76edf6fd7ba462d6ea4145e4d7618424b5ce6f5a6caa7 WatchSource:0}: Error finding container 76bc75a1b33da926e3c76edf6fd7ba462d6ea4145e4d7618424b5ce6f5a6caa7: Status 404 returned error can't find the container with id 76bc75a1b33da926e3c76edf6fd7ba462d6ea4145e4d7618424b5ce6f5a6caa7 Dec 13 03:25:45 crc kubenswrapper[5070]: I1213 03:25:45.413263 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jcl86" event={"ID":"2cb210d0-15da-410d-a470-e0969f46edf3","Type":"ContainerStarted","Data":"7d9d7609f1f440e737e9062fd6403dc494249e06ebcd4055a3e929a9119d6ded"} Dec 13 03:25:45 crc kubenswrapper[5070]: I1213 03:25:45.414117 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/1c2b05ec4440d1c803ed4d9fb5e82bd8db8287830a02ba7dc105a163cc24n5q" event={"ID":"c03750de-0097-4810-a030-d5a3f98f68fe","Type":"ContainerStarted","Data":"76bc75a1b33da926e3c76edf6fd7ba462d6ea4145e4d7618424b5ce6f5a6caa7"} Dec 13 03:25:46 crc kubenswrapper[5070]: I1213 03:25:46.423470 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/1c2b05ec4440d1c803ed4d9fb5e82bd8db8287830a02ba7dc105a163cc24n5q" event={"ID":"c03750de-0097-4810-a030-d5a3f98f68fe","Type":"ContainerStarted","Data":"eb208705783d69d9ce88276b5df0f92ea3f6fd0ae8b94b31750cc8f79b2ff33d"} Dec 13 03:25:46 crc kubenswrapper[5070]: I1213 03:25:46.466395 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-jcl86" podStartSLOduration=4.198389872 podStartE2EDuration="6.466377338s" podCreationTimestamp="2025-12-13 03:25:40 +0000 UTC" 
firstStartedPulling="2025-12-13 03:25:42.38943199 +0000 UTC m=+834.625275526" lastFinishedPulling="2025-12-13 03:25:44.657419446 +0000 UTC m=+836.893262992" observedRunningTime="2025-12-13 03:25:46.46425301 +0000 UTC m=+838.700096576" watchObservedRunningTime="2025-12-13 03:25:46.466377338 +0000 UTC m=+838.702220884" Dec 13 03:25:47 crc kubenswrapper[5070]: I1213 03:25:47.430344 5070 generic.go:334] "Generic (PLEG): container finished" podID="c03750de-0097-4810-a030-d5a3f98f68fe" containerID="eb208705783d69d9ce88276b5df0f92ea3f6fd0ae8b94b31750cc8f79b2ff33d" exitCode=0 Dec 13 03:25:47 crc kubenswrapper[5070]: I1213 03:25:47.430408 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/1c2b05ec4440d1c803ed4d9fb5e82bd8db8287830a02ba7dc105a163cc24n5q" event={"ID":"c03750de-0097-4810-a030-d5a3f98f68fe","Type":"ContainerDied","Data":"eb208705783d69d9ce88276b5df0f92ea3f6fd0ae8b94b31750cc8f79b2ff33d"} Dec 13 03:25:48 crc kubenswrapper[5070]: I1213 03:25:48.441362 5070 generic.go:334] "Generic (PLEG): container finished" podID="c03750de-0097-4810-a030-d5a3f98f68fe" containerID="900dbdbe5409fc16096948b66cc1d74ce78480e5b31fae68278dc5ca38c9cd75" exitCode=0 Dec 13 03:25:48 crc kubenswrapper[5070]: I1213 03:25:48.441678 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/1c2b05ec4440d1c803ed4d9fb5e82bd8db8287830a02ba7dc105a163cc24n5q" event={"ID":"c03750de-0097-4810-a030-d5a3f98f68fe","Type":"ContainerDied","Data":"900dbdbe5409fc16096948b66cc1d74ce78480e5b31fae68278dc5ca38c9cd75"} Dec 13 03:25:49 crc kubenswrapper[5070]: I1213 03:25:49.450049 5070 generic.go:334] "Generic (PLEG): container finished" podID="c03750de-0097-4810-a030-d5a3f98f68fe" containerID="a9785d87ceff542557da8d93d7836d60918e3487a6cb90629dfeb18739aae3b4" exitCode=0 Dec 13 03:25:49 crc kubenswrapper[5070]: I1213 03:25:49.450136 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/1c2b05ec4440d1c803ed4d9fb5e82bd8db8287830a02ba7dc105a163cc24n5q" event={"ID":"c03750de-0097-4810-a030-d5a3f98f68fe","Type":"ContainerDied","Data":"a9785d87ceff542557da8d93d7836d60918e3487a6cb90629dfeb18739aae3b4"} Dec 13 03:25:50 crc kubenswrapper[5070]: I1213 03:25:50.682247 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/1c2b05ec4440d1c803ed4d9fb5e82bd8db8287830a02ba7dc105a163cc24n5q" Dec 13 03:25:50 crc kubenswrapper[5070]: I1213 03:25:50.691245 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wnjbj\" (UniqueName: \"kubernetes.io/projected/c03750de-0097-4810-a030-d5a3f98f68fe-kube-api-access-wnjbj\") pod \"c03750de-0097-4810-a030-d5a3f98f68fe\" (UID: \"c03750de-0097-4810-a030-d5a3f98f68fe\") " Dec 13 03:25:50 crc kubenswrapper[5070]: I1213 03:25:50.691337 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/c03750de-0097-4810-a030-d5a3f98f68fe-bundle\") pod \"c03750de-0097-4810-a030-d5a3f98f68fe\" (UID: \"c03750de-0097-4810-a030-d5a3f98f68fe\") " Dec 13 03:25:50 crc kubenswrapper[5070]: I1213 03:25:50.691390 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/c03750de-0097-4810-a030-d5a3f98f68fe-util\") pod \"c03750de-0097-4810-a030-d5a3f98f68fe\" (UID: \"c03750de-0097-4810-a030-d5a3f98f68fe\") " Dec 13 03:25:50 crc kubenswrapper[5070]: I1213 03:25:50.694071 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c03750de-0097-4810-a030-d5a3f98f68fe-bundle" (OuterVolumeSpecName: "bundle") pod "c03750de-0097-4810-a030-d5a3f98f68fe" (UID: "c03750de-0097-4810-a030-d5a3f98f68fe"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 03:25:50 crc kubenswrapper[5070]: I1213 03:25:50.700615 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03750de-0097-4810-a030-d5a3f98f68fe-kube-api-access-wnjbj" (OuterVolumeSpecName: "kube-api-access-wnjbj") pod "c03750de-0097-4810-a030-d5a3f98f68fe" (UID: "c03750de-0097-4810-a030-d5a3f98f68fe"). InnerVolumeSpecName "kube-api-access-wnjbj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:25:50 crc kubenswrapper[5070]: I1213 03:25:50.714643 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c03750de-0097-4810-a030-d5a3f98f68fe-util" (OuterVolumeSpecName: "util") pod "c03750de-0097-4810-a030-d5a3f98f68fe" (UID: "c03750de-0097-4810-a030-d5a3f98f68fe"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 03:25:50 crc kubenswrapper[5070]: I1213 03:25:50.793502 5070 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/c03750de-0097-4810-a030-d5a3f98f68fe-util\") on node \"crc\" DevicePath \"\"" Dec 13 03:25:50 crc kubenswrapper[5070]: I1213 03:25:50.794009 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wnjbj\" (UniqueName: \"kubernetes.io/projected/c03750de-0097-4810-a030-d5a3f98f68fe-kube-api-access-wnjbj\") on node \"crc\" DevicePath \"\"" Dec 13 03:25:50 crc kubenswrapper[5070]: I1213 03:25:50.794116 5070 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/c03750de-0097-4810-a030-d5a3f98f68fe-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 03:25:51 crc kubenswrapper[5070]: I1213 03:25:51.131070 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-jcl86" Dec 13 03:25:51 crc kubenswrapper[5070]: I1213 03:25:51.131164 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-jcl86" Dec 13 03:25:51 crc kubenswrapper[5070]: I1213 03:25:51.196930 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-jcl86" Dec 13 03:25:51 crc kubenswrapper[5070]: I1213 03:25:51.464570 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/1c2b05ec4440d1c803ed4d9fb5e82bd8db8287830a02ba7dc105a163cc24n5q" Dec 13 03:25:51 crc kubenswrapper[5070]: I1213 03:25:51.464527 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/1c2b05ec4440d1c803ed4d9fb5e82bd8db8287830a02ba7dc105a163cc24n5q" event={"ID":"c03750de-0097-4810-a030-d5a3f98f68fe","Type":"ContainerDied","Data":"76bc75a1b33da926e3c76edf6fd7ba462d6ea4145e4d7618424b5ce6f5a6caa7"} Dec 13 03:25:51 crc kubenswrapper[5070]: I1213 03:25:51.464631 5070 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="76bc75a1b33da926e3c76edf6fd7ba462d6ea4145e4d7618424b5ce6f5a6caa7" Dec 13 03:25:51 crc kubenswrapper[5070]: I1213 03:25:51.508901 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-jcl86" Dec 13 03:25:51 crc kubenswrapper[5070]: I1213 03:25:51.942883 5070 patch_prober.go:28] interesting pod/machine-config-daemon-9l4rb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 13 03:25:51 crc kubenswrapper[5070]: I1213 03:25:51.942953 5070 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 13 03:25:54 crc kubenswrapper[5070]: I1213 03:25:54.737379 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-zfbjd"] Dec 13 03:25:54 crc kubenswrapper[5070]: E1213 03:25:54.738049 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c03750de-0097-4810-a030-d5a3f98f68fe" containerName="pull" Dec 13 03:25:54 crc 
kubenswrapper[5070]: I1213 03:25:54.738323 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="c03750de-0097-4810-a030-d5a3f98f68fe" containerName="pull" Dec 13 03:25:54 crc kubenswrapper[5070]: E1213 03:25:54.738347 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c03750de-0097-4810-a030-d5a3f98f68fe" containerName="extract" Dec 13 03:25:54 crc kubenswrapper[5070]: I1213 03:25:54.738355 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="c03750de-0097-4810-a030-d5a3f98f68fe" containerName="extract" Dec 13 03:25:54 crc kubenswrapper[5070]: E1213 03:25:54.738377 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c03750de-0097-4810-a030-d5a3f98f68fe" containerName="util" Dec 13 03:25:54 crc kubenswrapper[5070]: I1213 03:25:54.738385 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="c03750de-0097-4810-a030-d5a3f98f68fe" containerName="util" Dec 13 03:25:54 crc kubenswrapper[5070]: I1213 03:25:54.738543 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="c03750de-0097-4810-a030-d5a3f98f68fe" containerName="extract" Dec 13 03:25:54 crc kubenswrapper[5070]: I1213 03:25:54.739660 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-zfbjd" Dec 13 03:25:54 crc kubenswrapper[5070]: I1213 03:25:54.744427 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-zfbjd"] Dec 13 03:25:54 crc kubenswrapper[5070]: I1213 03:25:54.754020 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7fd333b0-9c84-440c-8bbf-4f572035cc9a-utilities\") pod \"redhat-marketplace-zfbjd\" (UID: \"7fd333b0-9c84-440c-8bbf-4f572035cc9a\") " pod="openshift-marketplace/redhat-marketplace-zfbjd" Dec 13 03:25:54 crc kubenswrapper[5070]: I1213 03:25:54.754111 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7fd333b0-9c84-440c-8bbf-4f572035cc9a-catalog-content\") pod \"redhat-marketplace-zfbjd\" (UID: \"7fd333b0-9c84-440c-8bbf-4f572035cc9a\") " pod="openshift-marketplace/redhat-marketplace-zfbjd" Dec 13 03:25:54 crc kubenswrapper[5070]: I1213 03:25:54.754183 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4vznw\" (UniqueName: \"kubernetes.io/projected/7fd333b0-9c84-440c-8bbf-4f572035cc9a-kube-api-access-4vznw\") pod \"redhat-marketplace-zfbjd\" (UID: \"7fd333b0-9c84-440c-8bbf-4f572035cc9a\") " pod="openshift-marketplace/redhat-marketplace-zfbjd" Dec 13 03:25:54 crc kubenswrapper[5070]: I1213 03:25:54.855467 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7fd333b0-9c84-440c-8bbf-4f572035cc9a-utilities\") pod \"redhat-marketplace-zfbjd\" (UID: \"7fd333b0-9c84-440c-8bbf-4f572035cc9a\") " pod="openshift-marketplace/redhat-marketplace-zfbjd" Dec 13 03:25:54 crc kubenswrapper[5070]: I1213 03:25:54.855751 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7fd333b0-9c84-440c-8bbf-4f572035cc9a-catalog-content\") pod \"redhat-marketplace-zfbjd\" (UID: \"7fd333b0-9c84-440c-8bbf-4f572035cc9a\") " pod="openshift-marketplace/redhat-marketplace-zfbjd" Dec 13 03:25:54 crc kubenswrapper[5070]: I1213 
03:25:54.855879 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4vznw\" (UniqueName: \"kubernetes.io/projected/7fd333b0-9c84-440c-8bbf-4f572035cc9a-kube-api-access-4vznw\") pod \"redhat-marketplace-zfbjd\" (UID: \"7fd333b0-9c84-440c-8bbf-4f572035cc9a\") " pod="openshift-marketplace/redhat-marketplace-zfbjd" Dec 13 03:25:54 crc kubenswrapper[5070]: I1213 03:25:54.855979 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7fd333b0-9c84-440c-8bbf-4f572035cc9a-utilities\") pod \"redhat-marketplace-zfbjd\" (UID: \"7fd333b0-9c84-440c-8bbf-4f572035cc9a\") " pod="openshift-marketplace/redhat-marketplace-zfbjd" Dec 13 03:25:54 crc kubenswrapper[5070]: I1213 03:25:54.856302 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7fd333b0-9c84-440c-8bbf-4f572035cc9a-catalog-content\") pod \"redhat-marketplace-zfbjd\" (UID: \"7fd333b0-9c84-440c-8bbf-4f572035cc9a\") " pod="openshift-marketplace/redhat-marketplace-zfbjd" Dec 13 03:25:54 crc kubenswrapper[5070]: I1213 03:25:54.888509 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4vznw\" (UniqueName: \"kubernetes.io/projected/7fd333b0-9c84-440c-8bbf-4f572035cc9a-kube-api-access-4vznw\") pod \"redhat-marketplace-zfbjd\" (UID: \"7fd333b0-9c84-440c-8bbf-4f572035cc9a\") " pod="openshift-marketplace/redhat-marketplace-zfbjd" Dec 13 03:25:54 crc kubenswrapper[5070]: I1213 03:25:54.914907 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-jcl86"] Dec 13 03:25:54 crc kubenswrapper[5070]: I1213 03:25:54.915351 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-jcl86" podUID="2cb210d0-15da-410d-a470-e0969f46edf3" containerName="registry-server" containerID="cri-o://7d9d7609f1f440e737e9062fd6403dc494249e06ebcd4055a3e929a9119d6ded" gracePeriod=2 Dec 13 03:25:55 crc kubenswrapper[5070]: I1213 03:25:55.108497 5070 util.go:30] "No sandbox for pod can be found. 
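"Killing container with a grace period" (here gracePeriod=2 for the community-operators registry-server, later gracePeriod=600 for machine-config-daemon) means the runtime first asks the container to stop and only force-kills it once the grace period expires. Below is a generic sketch of that pattern using a plain child process; it is not the CRI-O or kubelet implementation.

```go
// Sketch: graceful stop with a deadline, then force kill.
package main

import (
	"os/exec"
	"syscall"
	"time"
)

func stopWithGrace(cmd *exec.Cmd, grace time.Duration) error {
	// Polite request first (what a runtime sends to the container's PID 1).
	_ = cmd.Process.Signal(syscall.SIGTERM)

	done := make(chan error, 1)
	go func() { done <- cmd.Wait() }()

	select {
	case err := <-done:
		return err // exited within the grace period
	case <-time.After(grace):
		_ = cmd.Process.Kill() // grace period expired: SIGKILL
		return <-done
	}
}

func main() {
	cmd := exec.Command("sleep", "60")
	if err := cmd.Start(); err != nil {
		panic(err)
	}
	_ = stopWithGrace(cmd, 2*time.Second) // gracePeriod=2, as in the log
}
```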
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-zfbjd" Dec 13 03:25:55 crc kubenswrapper[5070]: I1213 03:25:55.321783 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-zfbjd"] Dec 13 03:25:55 crc kubenswrapper[5070]: I1213 03:25:55.491278 5070 generic.go:334] "Generic (PLEG): container finished" podID="2cb210d0-15da-410d-a470-e0969f46edf3" containerID="7d9d7609f1f440e737e9062fd6403dc494249e06ebcd4055a3e929a9119d6ded" exitCode=0 Dec 13 03:25:55 crc kubenswrapper[5070]: I1213 03:25:55.491624 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jcl86" event={"ID":"2cb210d0-15da-410d-a470-e0969f46edf3","Type":"ContainerDied","Data":"7d9d7609f1f440e737e9062fd6403dc494249e06ebcd4055a3e929a9119d6ded"} Dec 13 03:25:55 crc kubenswrapper[5070]: I1213 03:25:55.493094 5070 generic.go:334] "Generic (PLEG): container finished" podID="7fd333b0-9c84-440c-8bbf-4f572035cc9a" containerID="f8dca42103ed7bf58ecf8795affeeeb2573ec95c2fc9d01046388579e49e6ddd" exitCode=0 Dec 13 03:25:55 crc kubenswrapper[5070]: I1213 03:25:55.493123 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zfbjd" event={"ID":"7fd333b0-9c84-440c-8bbf-4f572035cc9a","Type":"ContainerDied","Data":"f8dca42103ed7bf58ecf8795affeeeb2573ec95c2fc9d01046388579e49e6ddd"} Dec 13 03:25:55 crc kubenswrapper[5070]: I1213 03:25:55.493139 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zfbjd" event={"ID":"7fd333b0-9c84-440c-8bbf-4f572035cc9a","Type":"ContainerStarted","Data":"04cd14445e3c460f6963861b4b982c87d47a138a3c03ea90b7965d7784c6d1dd"} Dec 13 03:25:55 crc kubenswrapper[5070]: I1213 03:25:55.732972 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-jcl86" Dec 13 03:25:55 crc kubenswrapper[5070]: I1213 03:25:55.768394 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2cb210d0-15da-410d-a470-e0969f46edf3-utilities\") pod \"2cb210d0-15da-410d-a470-e0969f46edf3\" (UID: \"2cb210d0-15da-410d-a470-e0969f46edf3\") " Dec 13 03:25:55 crc kubenswrapper[5070]: I1213 03:25:55.768532 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4ds6j\" (UniqueName: \"kubernetes.io/projected/2cb210d0-15da-410d-a470-e0969f46edf3-kube-api-access-4ds6j\") pod \"2cb210d0-15da-410d-a470-e0969f46edf3\" (UID: \"2cb210d0-15da-410d-a470-e0969f46edf3\") " Dec 13 03:25:55 crc kubenswrapper[5070]: I1213 03:25:55.768572 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2cb210d0-15da-410d-a470-e0969f46edf3-catalog-content\") pod \"2cb210d0-15da-410d-a470-e0969f46edf3\" (UID: \"2cb210d0-15da-410d-a470-e0969f46edf3\") " Dec 13 03:25:55 crc kubenswrapper[5070]: I1213 03:25:55.769205 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2cb210d0-15da-410d-a470-e0969f46edf3-utilities" (OuterVolumeSpecName: "utilities") pod "2cb210d0-15da-410d-a470-e0969f46edf3" (UID: "2cb210d0-15da-410d-a470-e0969f46edf3"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 03:25:55 crc kubenswrapper[5070]: I1213 03:25:55.775611 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2cb210d0-15da-410d-a470-e0969f46edf3-kube-api-access-4ds6j" (OuterVolumeSpecName: "kube-api-access-4ds6j") pod "2cb210d0-15da-410d-a470-e0969f46edf3" (UID: "2cb210d0-15da-410d-a470-e0969f46edf3"). InnerVolumeSpecName "kube-api-access-4ds6j". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:25:55 crc kubenswrapper[5070]: I1213 03:25:55.795326 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4ds6j\" (UniqueName: \"kubernetes.io/projected/2cb210d0-15da-410d-a470-e0969f46edf3-kube-api-access-4ds6j\") on node \"crc\" DevicePath \"\"" Dec 13 03:25:55 crc kubenswrapper[5070]: I1213 03:25:55.795356 5070 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2cb210d0-15da-410d-a470-e0969f46edf3-utilities\") on node \"crc\" DevicePath \"\"" Dec 13 03:25:55 crc kubenswrapper[5070]: I1213 03:25:55.833288 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2cb210d0-15da-410d-a470-e0969f46edf3-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2cb210d0-15da-410d-a470-e0969f46edf3" (UID: "2cb210d0-15da-410d-a470-e0969f46edf3"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 03:25:55 crc kubenswrapper[5070]: I1213 03:25:55.896160 5070 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2cb210d0-15da-410d-a470-e0969f46edf3-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 13 03:25:56 crc kubenswrapper[5070]: I1213 03:25:56.499305 5070 generic.go:334] "Generic (PLEG): container finished" podID="7fd333b0-9c84-440c-8bbf-4f572035cc9a" containerID="2408a04435bed956e99d75ebda2e2e1ba328e04c89806075c3dd2ba04ade27f8" exitCode=0 Dec 13 03:25:56 crc kubenswrapper[5070]: I1213 03:25:56.499403 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zfbjd" event={"ID":"7fd333b0-9c84-440c-8bbf-4f572035cc9a","Type":"ContainerDied","Data":"2408a04435bed956e99d75ebda2e2e1ba328e04c89806075c3dd2ba04ade27f8"} Dec 13 03:25:56 crc kubenswrapper[5070]: I1213 03:25:56.508506 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jcl86" event={"ID":"2cb210d0-15da-410d-a470-e0969f46edf3","Type":"ContainerDied","Data":"ba8a54d8bc3ab01e93dc0841e8b88e36fe9f4513a0bea4a14eaf34d5f31ed025"} Dec 13 03:25:56 crc kubenswrapper[5070]: I1213 03:25:56.508690 5070 scope.go:117] "RemoveContainer" containerID="7d9d7609f1f440e737e9062fd6403dc494249e06ebcd4055a3e929a9119d6ded" Dec 13 03:25:56 crc kubenswrapper[5070]: I1213 03:25:56.508931 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-jcl86" Dec 13 03:25:56 crc kubenswrapper[5070]: I1213 03:25:56.524509 5070 scope.go:117] "RemoveContainer" containerID="58d2c1d4163dbb5661f303706f44dc100f784d070f2f8ba5b7a37c03d35f87e0" Dec 13 03:25:56 crc kubenswrapper[5070]: I1213 03:25:56.540610 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-jcl86"] Dec 13 03:25:56 crc kubenswrapper[5070]: I1213 03:25:56.547130 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-jcl86"] Dec 13 03:25:56 crc kubenswrapper[5070]: I1213 03:25:56.556752 5070 scope.go:117] "RemoveContainer" containerID="5feaf79b938cedc6af97bdcb25cf7ee08b06d63bc5ac69d46b810dae9ac8bcdb" Dec 13 03:25:56 crc kubenswrapper[5070]: I1213 03:25:56.947744 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-operator-69d6bd7c7c-k2jww"] Dec 13 03:25:56 crc kubenswrapper[5070]: E1213 03:25:56.947976 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2cb210d0-15da-410d-a470-e0969f46edf3" containerName="extract-utilities" Dec 13 03:25:56 crc kubenswrapper[5070]: I1213 03:25:56.947987 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="2cb210d0-15da-410d-a470-e0969f46edf3" containerName="extract-utilities" Dec 13 03:25:56 crc kubenswrapper[5070]: E1213 03:25:56.948009 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2cb210d0-15da-410d-a470-e0969f46edf3" containerName="registry-server" Dec 13 03:25:56 crc kubenswrapper[5070]: I1213 03:25:56.948015 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="2cb210d0-15da-410d-a470-e0969f46edf3" containerName="registry-server" Dec 13 03:25:56 crc kubenswrapper[5070]: E1213 03:25:56.948027 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2cb210d0-15da-410d-a470-e0969f46edf3" containerName="extract-content" Dec 13 03:25:56 crc kubenswrapper[5070]: I1213 03:25:56.948033 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="2cb210d0-15da-410d-a470-e0969f46edf3" containerName="extract-content" Dec 13 03:25:56 crc kubenswrapper[5070]: I1213 03:25:56.948127 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="2cb210d0-15da-410d-a470-e0969f46edf3" containerName="registry-server" Dec 13 03:25:56 crc kubenswrapper[5070]: I1213 03:25:56.948681 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-69d6bd7c7c-k2jww" Dec 13 03:25:56 crc kubenswrapper[5070]: I1213 03:25:56.951003 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-operator-dockercfg-4mmtn" Dec 13 03:25:56 crc kubenswrapper[5070]: I1213 03:25:56.976052 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-69d6bd7c7c-k2jww"] Dec 13 03:25:57 crc kubenswrapper[5070]: I1213 03:25:57.009533 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dbnkl\" (UniqueName: \"kubernetes.io/projected/6dcdfaac-dbf4-459e-9798-3a1de9b35794-kube-api-access-dbnkl\") pod \"openstack-operator-controller-operator-69d6bd7c7c-k2jww\" (UID: \"6dcdfaac-dbf4-459e-9798-3a1de9b35794\") " pod="openstack-operators/openstack-operator-controller-operator-69d6bd7c7c-k2jww" Dec 13 03:25:57 crc kubenswrapper[5070]: I1213 03:25:57.110475 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dbnkl\" (UniqueName: \"kubernetes.io/projected/6dcdfaac-dbf4-459e-9798-3a1de9b35794-kube-api-access-dbnkl\") pod \"openstack-operator-controller-operator-69d6bd7c7c-k2jww\" (UID: \"6dcdfaac-dbf4-459e-9798-3a1de9b35794\") " pod="openstack-operators/openstack-operator-controller-operator-69d6bd7c7c-k2jww" Dec 13 03:25:57 crc kubenswrapper[5070]: I1213 03:25:57.129840 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dbnkl\" (UniqueName: \"kubernetes.io/projected/6dcdfaac-dbf4-459e-9798-3a1de9b35794-kube-api-access-dbnkl\") pod \"openstack-operator-controller-operator-69d6bd7c7c-k2jww\" (UID: \"6dcdfaac-dbf4-459e-9798-3a1de9b35794\") " pod="openstack-operators/openstack-operator-controller-operator-69d6bd7c7c-k2jww" Dec 13 03:25:57 crc kubenswrapper[5070]: I1213 03:25:57.269714 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-69d6bd7c7c-k2jww" Dec 13 03:25:57 crc kubenswrapper[5070]: I1213 03:25:57.517209 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zfbjd" event={"ID":"7fd333b0-9c84-440c-8bbf-4f572035cc9a","Type":"ContainerStarted","Data":"67c40b2784f427ad00c0cddb1ff75a7b2dd26dae54ef96758fb7b53ac984e763"} Dec 13 03:25:57 crc kubenswrapper[5070]: I1213 03:25:57.519244 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-69d6bd7c7c-k2jww"] Dec 13 03:25:57 crc kubenswrapper[5070]: W1213 03:25:57.527066 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6dcdfaac_dbf4_459e_9798_3a1de9b35794.slice/crio-00c76cb409d1e61a5b91babccc77d343a39dfb581969778f6116de62e37f1ba8 WatchSource:0}: Error finding container 00c76cb409d1e61a5b91babccc77d343a39dfb581969778f6116de62e37f1ba8: Status 404 returned error can't find the container with id 00c76cb409d1e61a5b91babccc77d343a39dfb581969778f6116de62e37f1ba8 Dec 13 03:25:57 crc kubenswrapper[5070]: I1213 03:25:57.542792 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-zfbjd" podStartSLOduration=2.144650589 podStartE2EDuration="3.542774885s" podCreationTimestamp="2025-12-13 03:25:54 +0000 UTC" firstStartedPulling="2025-12-13 03:25:55.494368675 +0000 UTC m=+847.730212231" lastFinishedPulling="2025-12-13 03:25:56.892492981 +0000 UTC m=+849.128336527" observedRunningTime="2025-12-13 03:25:57.541231543 +0000 UTC m=+849.777075099" watchObservedRunningTime="2025-12-13 03:25:57.542774885 +0000 UTC m=+849.778618431" Dec 13 03:25:58 crc kubenswrapper[5070]: I1213 03:25:58.179322 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2cb210d0-15da-410d-a470-e0969f46edf3" path="/var/lib/kubelet/pods/2cb210d0-15da-410d-a470-e0969f46edf3/volumes" Dec 13 03:25:58 crc kubenswrapper[5070]: I1213 03:25:58.528071 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-69d6bd7c7c-k2jww" event={"ID":"6dcdfaac-dbf4-459e-9798-3a1de9b35794","Type":"ContainerStarted","Data":"00c76cb409d1e61a5b91babccc77d343a39dfb581969778f6116de62e37f1ba8"} Dec 13 03:26:03 crc kubenswrapper[5070]: I1213 03:26:03.560273 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-69d6bd7c7c-k2jww" event={"ID":"6dcdfaac-dbf4-459e-9798-3a1de9b35794","Type":"ContainerStarted","Data":"0e16e13b57f952025b94defa60fa13ec68805030ac714cb42287bac58fbc4acf"} Dec 13 03:26:05 crc kubenswrapper[5070]: I1213 03:26:05.110269 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-zfbjd" Dec 13 03:26:05 crc kubenswrapper[5070]: I1213 03:26:05.110810 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-zfbjd" Dec 13 03:26:05 crc kubenswrapper[5070]: I1213 03:26:05.258298 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-zfbjd" Dec 13 03:26:05 crc kubenswrapper[5070]: I1213 03:26:05.676014 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-zfbjd" Dec 13 03:26:07 crc kubenswrapper[5070]: 
I1213 03:26:07.315893 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-zfbjd"]
Dec 13 03:26:07 crc kubenswrapper[5070]: I1213 03:26:07.612889 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-zfbjd" podUID="7fd333b0-9c84-440c-8bbf-4f572035cc9a" containerName="registry-server" containerID="cri-o://67c40b2784f427ad00c0cddb1ff75a7b2dd26dae54ef96758fb7b53ac984e763" gracePeriod=2
Dec 13 03:26:07 crc kubenswrapper[5070]: I1213 03:26:07.613154 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-69d6bd7c7c-k2jww" event={"ID":"6dcdfaac-dbf4-459e-9798-3a1de9b35794","Type":"ContainerStarted","Data":"6bdafdf94c75865369eaff36a531c099f2344a96280c83c177ed7d017f1f3c03"}
Dec 13 03:26:07 crc kubenswrapper[5070]: I1213 03:26:07.613347 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-operator-69d6bd7c7c-k2jww"
Dec 13 03:26:07 crc kubenswrapper[5070]: I1213 03:26:07.644587 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-operator-69d6bd7c7c-k2jww" podStartSLOduration=2.272955885 podStartE2EDuration="11.644571982s" podCreationTimestamp="2025-12-13 03:25:56 +0000 UTC" firstStartedPulling="2025-12-13 03:25:57.532082123 +0000 UTC m=+849.767925669" lastFinishedPulling="2025-12-13 03:26:06.90369822 +0000 UTC m=+859.139541766" observedRunningTime="2025-12-13 03:26:07.641135418 +0000 UTC m=+859.876978964" watchObservedRunningTime="2025-12-13 03:26:07.644571982 +0000 UTC m=+859.880415528"
Dec 13 03:26:08 crc kubenswrapper[5070]: I1213 03:26:08.620703 5070 generic.go:334] "Generic (PLEG): container finished" podID="7fd333b0-9c84-440c-8bbf-4f572035cc9a" containerID="67c40b2784f427ad00c0cddb1ff75a7b2dd26dae54ef96758fb7b53ac984e763" exitCode=0
Dec 13 03:26:08 crc kubenswrapper[5070]: I1213 03:26:08.620785 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zfbjd" event={"ID":"7fd333b0-9c84-440c-8bbf-4f572035cc9a","Type":"ContainerDied","Data":"67c40b2784f427ad00c0cddb1ff75a7b2dd26dae54ef96758fb7b53ac984e763"}
Dec 13 03:26:08 crc kubenswrapper[5070]: I1213 03:26:08.625180 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-operator-69d6bd7c7c-k2jww"
Dec 13 03:26:08 crc kubenswrapper[5070]: I1213 03:26:08.854830 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-zfbjd"
Dec 13 03:26:08 crc kubenswrapper[5070]: I1213 03:26:08.994136 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4vznw\" (UniqueName: \"kubernetes.io/projected/7fd333b0-9c84-440c-8bbf-4f572035cc9a-kube-api-access-4vznw\") pod \"7fd333b0-9c84-440c-8bbf-4f572035cc9a\" (UID: \"7fd333b0-9c84-440c-8bbf-4f572035cc9a\") "
Dec 13 03:26:08 crc kubenswrapper[5070]: I1213 03:26:08.994202 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7fd333b0-9c84-440c-8bbf-4f572035cc9a-utilities\") pod \"7fd333b0-9c84-440c-8bbf-4f572035cc9a\" (UID: \"7fd333b0-9c84-440c-8bbf-4f572035cc9a\") "
Dec 13 03:26:08 crc kubenswrapper[5070]: I1213 03:26:08.994264 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7fd333b0-9c84-440c-8bbf-4f572035cc9a-catalog-content\") pod \"7fd333b0-9c84-440c-8bbf-4f572035cc9a\" (UID: \"7fd333b0-9c84-440c-8bbf-4f572035cc9a\") "
Dec 13 03:26:08 crc kubenswrapper[5070]: I1213 03:26:08.996433 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7fd333b0-9c84-440c-8bbf-4f572035cc9a-utilities" (OuterVolumeSpecName: "utilities") pod "7fd333b0-9c84-440c-8bbf-4f572035cc9a" (UID: "7fd333b0-9c84-440c-8bbf-4f572035cc9a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 13 03:26:09 crc kubenswrapper[5070]: I1213 03:26:09.001130 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7fd333b0-9c84-440c-8bbf-4f572035cc9a-kube-api-access-4vznw" (OuterVolumeSpecName: "kube-api-access-4vznw") pod "7fd333b0-9c84-440c-8bbf-4f572035cc9a" (UID: "7fd333b0-9c84-440c-8bbf-4f572035cc9a"). InnerVolumeSpecName "kube-api-access-4vznw". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 13 03:26:09 crc kubenswrapper[5070]: I1213 03:26:09.014120 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7fd333b0-9c84-440c-8bbf-4f572035cc9a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7fd333b0-9c84-440c-8bbf-4f572035cc9a" (UID: "7fd333b0-9c84-440c-8bbf-4f572035cc9a"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 13 03:26:09 crc kubenswrapper[5070]: I1213 03:26:09.095328 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4vznw\" (UniqueName: \"kubernetes.io/projected/7fd333b0-9c84-440c-8bbf-4f572035cc9a-kube-api-access-4vznw\") on node \"crc\" DevicePath \"\""
Dec 13 03:26:09 crc kubenswrapper[5070]: I1213 03:26:09.095361 5070 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7fd333b0-9c84-440c-8bbf-4f572035cc9a-utilities\") on node \"crc\" DevicePath \"\""
Dec 13 03:26:09 crc kubenswrapper[5070]: I1213 03:26:09.095371 5070 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7fd333b0-9c84-440c-8bbf-4f572035cc9a-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 13 03:26:09 crc kubenswrapper[5070]: I1213 03:26:09.627389 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zfbjd" event={"ID":"7fd333b0-9c84-440c-8bbf-4f572035cc9a","Type":"ContainerDied","Data":"04cd14445e3c460f6963861b4b982c87d47a138a3c03ea90b7965d7784c6d1dd"}
Dec 13 03:26:09 crc kubenswrapper[5070]: I1213 03:26:09.627468 5070 scope.go:117] "RemoveContainer" containerID="67c40b2784f427ad00c0cddb1ff75a7b2dd26dae54ef96758fb7b53ac984e763"
Dec 13 03:26:09 crc kubenswrapper[5070]: I1213 03:26:09.627471 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-zfbjd"
Dec 13 03:26:09 crc kubenswrapper[5070]: I1213 03:26:09.643706 5070 scope.go:117] "RemoveContainer" containerID="2408a04435bed956e99d75ebda2e2e1ba328e04c89806075c3dd2ba04ade27f8"
Dec 13 03:26:09 crc kubenswrapper[5070]: I1213 03:26:09.651822 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-zfbjd"]
Dec 13 03:26:09 crc kubenswrapper[5070]: I1213 03:26:09.655706 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-zfbjd"]
Dec 13 03:26:09 crc kubenswrapper[5070]: I1213 03:26:09.672790 5070 scope.go:117] "RemoveContainer" containerID="f8dca42103ed7bf58ecf8795affeeeb2573ec95c2fc9d01046388579e49e6ddd"
Dec 13 03:26:10 crc kubenswrapper[5070]: I1213 03:26:10.175344 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7fd333b0-9c84-440c-8bbf-4f572035cc9a" path="/var/lib/kubelet/pods/7fd333b0-9c84-440c-8bbf-4f572035cc9a/volumes"
Dec 13 03:26:21 crc kubenswrapper[5070]: I1213 03:26:21.942852 5070 patch_prober.go:28] interesting pod/machine-config-daemon-9l4rb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 13 03:26:21 crc kubenswrapper[5070]: I1213 03:26:21.944851 5070 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 13 03:26:21 crc kubenswrapper[5070]: I1213 03:26:21.944995 5070 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb"
Dec 13 03:26:21 crc kubenswrapper[5070]: I1213 03:26:21.945721 5070 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"440f1b13ac82692ab2897557fa86ac3ce3eba37cec807bd53246344ef4b3c0b6"} pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 13 03:26:21 crc kubenswrapper[5070]: I1213 03:26:21.945902 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" containerName="machine-config-daemon" containerID="cri-o://440f1b13ac82692ab2897557fa86ac3ce3eba37cec807bd53246344ef4b3c0b6" gracePeriod=600
Dec 13 03:26:22 crc kubenswrapper[5070]: I1213 03:26:22.704273 5070 generic.go:334] "Generic (PLEG): container finished" podID="a2e447c7-5901-414f-af96-69441d4750db" containerID="440f1b13ac82692ab2897557fa86ac3ce3eba37cec807bd53246344ef4b3c0b6" exitCode=0
Dec 13 03:26:22 crc kubenswrapper[5070]: I1213 03:26:22.704790 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" event={"ID":"a2e447c7-5901-414f-af96-69441d4750db","Type":"ContainerDied","Data":"440f1b13ac82692ab2897557fa86ac3ce3eba37cec807bd53246344ef4b3c0b6"}
Dec 13 03:26:22 crc kubenswrapper[5070]: I1213 03:26:22.704883 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" event={"ID":"a2e447c7-5901-414f-af96-69441d4750db","Type":"ContainerStarted","Data":"23a1c6bfa0fe9bf90a6fdead1bc43aade8cb45302f3b76d55b4d7f69ae3c4750"}
Dec 13 03:26:22 crc kubenswrapper[5070]: I1213 03:26:22.704953 5070 scope.go:117] "RemoveContainer" containerID="b5e0b24c2b047322a8bb0e293689c3f0d2f12b10fd38f01969f32e9cc47932b2"
Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.268528 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/barbican-operator-controller-manager-5bfbbb859d-blc2l"]
Dec 13 03:26:35 crc kubenswrapper[5070]: E1213 03:26:35.269285 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7fd333b0-9c84-440c-8bbf-4f572035cc9a" containerName="extract-utilities"
Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.269302 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="7fd333b0-9c84-440c-8bbf-4f572035cc9a" containerName="extract-utilities"
Dec 13 03:26:35 crc kubenswrapper[5070]: E1213 03:26:35.269327 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7fd333b0-9c84-440c-8bbf-4f572035cc9a" containerName="registry-server"
Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.269335 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="7fd333b0-9c84-440c-8bbf-4f572035cc9a" containerName="registry-server"
Dec 13 03:26:35 crc kubenswrapper[5070]: E1213 03:26:35.269351 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7fd333b0-9c84-440c-8bbf-4f572035cc9a" containerName="extract-content"
Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.269358 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="7fd333b0-9c84-440c-8bbf-4f572035cc9a" containerName="extract-content"
Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.269687 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="7fd333b0-9c84-440c-8bbf-4f572035cc9a" containerName="registry-server"
Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.270323 5070 util.go:30] "No sandbox
for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-5bfbbb859d-blc2l" Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.272477 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-controller-manager-dockercfg-jfnv6" Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.284937 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-5bfbbb859d-blc2l"] Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.290181 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/cinder-operator-controller-manager-75f975cf8c-wb8p4"] Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.291075 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-75f975cf8c-wb8p4" Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.293328 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"cinder-operator-controller-manager-dockercfg-6h5xj" Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.312689 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/designate-operator-controller-manager-6788cc6d75-8hczh"] Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.313927 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-6788cc6d75-8hczh" Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.319321 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"designate-operator-controller-manager-dockercfg-7lx5v" Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.324231 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-75f975cf8c-wb8p4"] Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.349160 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/glance-operator-controller-manager-85fbd69fcd-98cmt"] Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.350457 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-85fbd69fcd-98cmt" Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.357547 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/heat-operator-controller-manager-698d6fd7d6-nm89q"] Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.358655 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"glance-operator-controller-manager-dockercfg-mzqkm" Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.358825 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-698d6fd7d6-nm89q" Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.361634 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"heat-operator-controller-manager-dockercfg-r8plk" Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.387360 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-6788cc6d75-8hczh"] Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.404525 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-85fbd69fcd-98cmt"] Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.411288 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-698d6fd7d6-nm89q"] Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.415179 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/horizon-operator-controller-manager-7d5d9fd47f-lt4rh"] Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.426953 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-7d5d9fd47f-lt4rh" Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.434850 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-dockercfg-9rj4l" Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.441066 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dwb7c\" (UniqueName: \"kubernetes.io/projected/460be9d3-0ac8-4080-ba44-48db8452a323-kube-api-access-dwb7c\") pod \"barbican-operator-controller-manager-5bfbbb859d-blc2l\" (UID: \"460be9d3-0ac8-4080-ba44-48db8452a323\") " pod="openstack-operators/barbican-operator-controller-manager-5bfbbb859d-blc2l" Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.441147 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-56tl6\" (UniqueName: \"kubernetes.io/projected/df0aabbc-0421-41d7-ac6b-540a4bd7121e-kube-api-access-56tl6\") pod \"cinder-operator-controller-manager-75f975cf8c-wb8p4\" (UID: \"df0aabbc-0421-41d7-ac6b-540a4bd7121e\") " pod="openstack-operators/cinder-operator-controller-manager-75f975cf8c-wb8p4" Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.441228 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vsgd8\" (UniqueName: \"kubernetes.io/projected/5848798f-7e27-4b39-b60d-84edb77c765d-kube-api-access-vsgd8\") pod \"designate-operator-controller-manager-6788cc6d75-8hczh\" (UID: \"5848798f-7e27-4b39-b60d-84edb77c765d\") " pod="openstack-operators/designate-operator-controller-manager-6788cc6d75-8hczh" Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.441672 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-controller-manager-85d55b5858-xjz7n"] Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.442683 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-85d55b5858-xjz7n" Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.445594 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-fhhrc" Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.447809 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-7d5d9fd47f-lt4rh"] Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.448110 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-webhook-server-cert" Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.456740 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-85d55b5858-xjz7n"] Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.481508 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/manila-operator-controller-manager-5cbc8c7f96-889ct"] Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.482718 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-5cbc8c7f96-889ct" Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.483217 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ironic-operator-controller-manager-54485f899-6ccdh"] Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.483985 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-54485f899-6ccdh" Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.521311 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"manila-operator-controller-manager-dockercfg-2zmdm" Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.521552 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ironic-operator-controller-manager-dockercfg-299dl" Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.532221 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-controller-manager-79cc9d59f5-tnnbf"] Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.533254 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-79cc9d59f5-tnnbf" Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.546721 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dwb7c\" (UniqueName: \"kubernetes.io/projected/460be9d3-0ac8-4080-ba44-48db8452a323-kube-api-access-dwb7c\") pod \"barbican-operator-controller-manager-5bfbbb859d-blc2l\" (UID: \"460be9d3-0ac8-4080-ba44-48db8452a323\") " pod="openstack-operators/barbican-operator-controller-manager-5bfbbb859d-blc2l" Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.546781 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-56tl6\" (UniqueName: \"kubernetes.io/projected/df0aabbc-0421-41d7-ac6b-540a4bd7121e-kube-api-access-56tl6\") pod \"cinder-operator-controller-manager-75f975cf8c-wb8p4\" (UID: \"df0aabbc-0421-41d7-ac6b-540a4bd7121e\") " pod="openstack-operators/cinder-operator-controller-manager-75f975cf8c-wb8p4" Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.546806 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/4fadcc6f-e200-444e-b1d4-e195467c129d-cert\") pod \"infra-operator-controller-manager-85d55b5858-xjz7n\" (UID: \"4fadcc6f-e200-444e-b1d4-e195467c129d\") " pod="openstack-operators/infra-operator-controller-manager-85d55b5858-xjz7n" Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.546822 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zwnwz\" (UniqueName: \"kubernetes.io/projected/17d2a89b-feed-4eae-bbc3-5296e677ef48-kube-api-access-zwnwz\") pod \"horizon-operator-controller-manager-7d5d9fd47f-lt4rh\" (UID: \"17d2a89b-feed-4eae-bbc3-5296e677ef48\") " pod="openstack-operators/horizon-operator-controller-manager-7d5d9fd47f-lt4rh" Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.546854 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rj7mm\" (UniqueName: \"kubernetes.io/projected/66c61c3a-a7b0-4a4a-b086-9c1531a9b165-kube-api-access-rj7mm\") pod \"manila-operator-controller-manager-5cbc8c7f96-889ct\" (UID: \"66c61c3a-a7b0-4a4a-b086-9c1531a9b165\") " pod="openstack-operators/manila-operator-controller-manager-5cbc8c7f96-889ct" Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.546891 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mvcvm\" (UniqueName: \"kubernetes.io/projected/17401b15-4810-4d28-9244-f1ef166c3278-kube-api-access-mvcvm\") pod \"glance-operator-controller-manager-85fbd69fcd-98cmt\" (UID: \"17401b15-4810-4d28-9244-f1ef166c3278\") " pod="openstack-operators/glance-operator-controller-manager-85fbd69fcd-98cmt" Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.546911 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vsgd8\" (UniqueName: \"kubernetes.io/projected/5848798f-7e27-4b39-b60d-84edb77c765d-kube-api-access-vsgd8\") pod \"designate-operator-controller-manager-6788cc6d75-8hczh\" (UID: \"5848798f-7e27-4b39-b60d-84edb77c765d\") " pod="openstack-operators/designate-operator-controller-manager-6788cc6d75-8hczh" Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.547412 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-szjkf\" (UniqueName: \"kubernetes.io/projected/62ea5d75-2d09-414d-b682-6ed196245ea9-kube-api-access-szjkf\") pod \"heat-operator-controller-manager-698d6fd7d6-nm89q\" (UID: \"62ea5d75-2d09-414d-b682-6ed196245ea9\") " pod="openstack-operators/heat-operator-controller-manager-698d6fd7d6-nm89q" Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.547493 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mtmdq\" (UniqueName: \"kubernetes.io/projected/4fadcc6f-e200-444e-b1d4-e195467c129d-kube-api-access-mtmdq\") pod \"infra-operator-controller-manager-85d55b5858-xjz7n\" (UID: \"4fadcc6f-e200-444e-b1d4-e195467c129d\") " pod="openstack-operators/infra-operator-controller-manager-85d55b5858-xjz7n" Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.567743 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-dockercfg-pdm54" Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.568508 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-5cbc8c7f96-889ct"] Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.577291 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-79cc9d59f5-tnnbf"] Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.583624 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-64d7c556cd-9xzl9"] Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.590741 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-64d7c556cd-9xzl9" Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.600880 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-xgfz4" Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.631072 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dwb7c\" (UniqueName: \"kubernetes.io/projected/460be9d3-0ac8-4080-ba44-48db8452a323-kube-api-access-dwb7c\") pod \"barbican-operator-controller-manager-5bfbbb859d-blc2l\" (UID: \"460be9d3-0ac8-4080-ba44-48db8452a323\") " pod="openstack-operators/barbican-operator-controller-manager-5bfbbb859d-blc2l" Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.651130 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mtmdq\" (UniqueName: \"kubernetes.io/projected/4fadcc6f-e200-444e-b1d4-e195467c129d-kube-api-access-mtmdq\") pod \"infra-operator-controller-manager-85d55b5858-xjz7n\" (UID: \"4fadcc6f-e200-444e-b1d4-e195467c129d\") " pod="openstack-operators/infra-operator-controller-manager-85d55b5858-xjz7n" Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.651211 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/4fadcc6f-e200-444e-b1d4-e195467c129d-cert\") pod \"infra-operator-controller-manager-85d55b5858-xjz7n\" (UID: \"4fadcc6f-e200-444e-b1d4-e195467c129d\") " pod="openstack-operators/infra-operator-controller-manager-85d55b5858-xjz7n" Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.651235 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zwnwz\" (UniqueName: 
\"kubernetes.io/projected/17d2a89b-feed-4eae-bbc3-5296e677ef48-kube-api-access-zwnwz\") pod \"horizon-operator-controller-manager-7d5d9fd47f-lt4rh\" (UID: \"17d2a89b-feed-4eae-bbc3-5296e677ef48\") " pod="openstack-operators/horizon-operator-controller-manager-7d5d9fd47f-lt4rh" Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.651266 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rj7mm\" (UniqueName: \"kubernetes.io/projected/66c61c3a-a7b0-4a4a-b086-9c1531a9b165-kube-api-access-rj7mm\") pod \"manila-operator-controller-manager-5cbc8c7f96-889ct\" (UID: \"66c61c3a-a7b0-4a4a-b086-9c1531a9b165\") " pod="openstack-operators/manila-operator-controller-manager-5cbc8c7f96-889ct" Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.651301 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5zg22\" (UniqueName: \"kubernetes.io/projected/eac9d917-6a87-4f79-9758-15984dd71e23-kube-api-access-5zg22\") pod \"ironic-operator-controller-manager-54485f899-6ccdh\" (UID: \"eac9d917-6a87-4f79-9758-15984dd71e23\") " pod="openstack-operators/ironic-operator-controller-manager-54485f899-6ccdh" Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.651327 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mvcvm\" (UniqueName: \"kubernetes.io/projected/17401b15-4810-4d28-9244-f1ef166c3278-kube-api-access-mvcvm\") pod \"glance-operator-controller-manager-85fbd69fcd-98cmt\" (UID: \"17401b15-4810-4d28-9244-f1ef166c3278\") " pod="openstack-operators/glance-operator-controller-manager-85fbd69fcd-98cmt" Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.651362 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-szjkf\" (UniqueName: \"kubernetes.io/projected/62ea5d75-2d09-414d-b682-6ed196245ea9-kube-api-access-szjkf\") pod \"heat-operator-controller-manager-698d6fd7d6-nm89q\" (UID: \"62ea5d75-2d09-414d-b682-6ed196245ea9\") " pod="openstack-operators/heat-operator-controller-manager-698d6fd7d6-nm89q" Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.651382 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qjn77\" (UniqueName: \"kubernetes.io/projected/a037ecf5-dd37-4305-93a6-e28771e8df87-kube-api-access-qjn77\") pod \"keystone-operator-controller-manager-79cc9d59f5-tnnbf\" (UID: \"a037ecf5-dd37-4305-93a6-e28771e8df87\") " pod="openstack-operators/keystone-operator-controller-manager-79cc9d59f5-tnnbf" Dec 13 03:26:35 crc kubenswrapper[5070]: E1213 03:26:35.652584 5070 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 13 03:26:35 crc kubenswrapper[5070]: E1213 03:26:35.652634 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4fadcc6f-e200-444e-b1d4-e195467c129d-cert podName:4fadcc6f-e200-444e-b1d4-e195467c129d nodeName:}" failed. No retries permitted until 2025-12-13 03:26:36.152619217 +0000 UTC m=+888.388462763 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/4fadcc6f-e200-444e-b1d4-e195467c129d-cert") pod "infra-operator-controller-manager-85d55b5858-xjz7n" (UID: "4fadcc6f-e200-444e-b1d4-e195467c129d") : secret "infra-operator-webhook-server-cert" not found Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.688784 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vsgd8\" (UniqueName: \"kubernetes.io/projected/5848798f-7e27-4b39-b60d-84edb77c765d-kube-api-access-vsgd8\") pod \"designate-operator-controller-manager-6788cc6d75-8hczh\" (UID: \"5848798f-7e27-4b39-b60d-84edb77c765d\") " pod="openstack-operators/designate-operator-controller-manager-6788cc6d75-8hczh" Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.703971 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-56tl6\" (UniqueName: \"kubernetes.io/projected/df0aabbc-0421-41d7-ac6b-540a4bd7121e-kube-api-access-56tl6\") pod \"cinder-operator-controller-manager-75f975cf8c-wb8p4\" (UID: \"df0aabbc-0421-41d7-ac6b-540a4bd7121e\") " pod="openstack-operators/cinder-operator-controller-manager-75f975cf8c-wb8p4" Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.756519 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-54485f899-6ccdh"] Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.758996 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/neutron-operator-controller-manager-58879495c-mfwwx"] Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.760238 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-58879495c-mfwwx" Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.770979 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mvcvm\" (UniqueName: \"kubernetes.io/projected/17401b15-4810-4d28-9244-f1ef166c3278-kube-api-access-mvcvm\") pod \"glance-operator-controller-manager-85fbd69fcd-98cmt\" (UID: \"17401b15-4810-4d28-9244-f1ef166c3278\") " pod="openstack-operators/glance-operator-controller-manager-85fbd69fcd-98cmt" Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.783740 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mtmdq\" (UniqueName: \"kubernetes.io/projected/4fadcc6f-e200-444e-b1d4-e195467c129d-kube-api-access-mtmdq\") pod \"infra-operator-controller-manager-85d55b5858-xjz7n\" (UID: \"4fadcc6f-e200-444e-b1d4-e195467c129d\") " pod="openstack-operators/infra-operator-controller-manager-85d55b5858-xjz7n" Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.784794 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5zg22\" (UniqueName: \"kubernetes.io/projected/eac9d917-6a87-4f79-9758-15984dd71e23-kube-api-access-5zg22\") pod \"ironic-operator-controller-manager-54485f899-6ccdh\" (UID: \"eac9d917-6a87-4f79-9758-15984dd71e23\") " pod="openstack-operators/ironic-operator-controller-manager-54485f899-6ccdh" Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.784843 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-56plt\" (UniqueName: \"kubernetes.io/projected/26894e71-7711-47cc-afe2-44f1d0657000-kube-api-access-56plt\") pod \"mariadb-operator-controller-manager-64d7c556cd-9xzl9\" (UID: 
\"26894e71-7711-47cc-afe2-44f1d0657000\") " pod="openstack-operators/mariadb-operator-controller-manager-64d7c556cd-9xzl9" Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.784884 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qjn77\" (UniqueName: \"kubernetes.io/projected/a037ecf5-dd37-4305-93a6-e28771e8df87-kube-api-access-qjn77\") pod \"keystone-operator-controller-manager-79cc9d59f5-tnnbf\" (UID: \"a037ecf5-dd37-4305-93a6-e28771e8df87\") " pod="openstack-operators/keystone-operator-controller-manager-79cc9d59f5-tnnbf" Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.795507 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-64d7c556cd-9xzl9"] Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.802654 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-szjkf\" (UniqueName: \"kubernetes.io/projected/62ea5d75-2d09-414d-b682-6ed196245ea9-kube-api-access-szjkf\") pod \"heat-operator-controller-manager-698d6fd7d6-nm89q\" (UID: \"62ea5d75-2d09-414d-b682-6ed196245ea9\") " pod="openstack-operators/heat-operator-controller-manager-698d6fd7d6-nm89q" Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.803132 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zwnwz\" (UniqueName: \"kubernetes.io/projected/17d2a89b-feed-4eae-bbc3-5296e677ef48-kube-api-access-zwnwz\") pod \"horizon-operator-controller-manager-7d5d9fd47f-lt4rh\" (UID: \"17d2a89b-feed-4eae-bbc3-5296e677ef48\") " pod="openstack-operators/horizon-operator-controller-manager-7d5d9fd47f-lt4rh" Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.803467 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"neutron-operator-controller-manager-dockercfg-jftq9" Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.815519 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-58879495c-mfwwx"] Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.818211 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rj7mm\" (UniqueName: \"kubernetes.io/projected/66c61c3a-a7b0-4a4a-b086-9c1531a9b165-kube-api-access-rj7mm\") pod \"manila-operator-controller-manager-5cbc8c7f96-889ct\" (UID: \"66c61c3a-a7b0-4a4a-b086-9c1531a9b165\") " pod="openstack-operators/manila-operator-controller-manager-5cbc8c7f96-889ct" Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.820079 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qjn77\" (UniqueName: \"kubernetes.io/projected/a037ecf5-dd37-4305-93a6-e28771e8df87-kube-api-access-qjn77\") pod \"keystone-operator-controller-manager-79cc9d59f5-tnnbf\" (UID: \"a037ecf5-dd37-4305-93a6-e28771e8df87\") " pod="openstack-operators/keystone-operator-controller-manager-79cc9d59f5-tnnbf" Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.838492 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/nova-operator-controller-manager-79d658b66d-9kcnz"] Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.839506 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-79d658b66d-9kcnz" Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.842303 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/octavia-operator-controller-manager-d5fb87cb8-5mqm7"] Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.843342 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-d5fb87cb8-5mqm7" Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.851842 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"nova-operator-controller-manager-dockercfg-82w74" Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.851962 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"octavia-operator-controller-manager-dockercfg-w8hhn" Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.873471 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5zg22\" (UniqueName: \"kubernetes.io/projected/eac9d917-6a87-4f79-9758-15984dd71e23-kube-api-access-5zg22\") pod \"ironic-operator-controller-manager-54485f899-6ccdh\" (UID: \"eac9d917-6a87-4f79-9758-15984dd71e23\") " pod="openstack-operators/ironic-operator-controller-manager-54485f899-6ccdh" Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.879078 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-79d658b66d-9kcnz"] Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.883700 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-5cbc8c7f96-889ct" Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.885580 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nwck2\" (UniqueName: \"kubernetes.io/projected/714804fd-d184-418f-a12f-efaec040cef1-kube-api-access-nwck2\") pod \"nova-operator-controller-manager-79d658b66d-9kcnz\" (UID: \"714804fd-d184-418f-a12f-efaec040cef1\") " pod="openstack-operators/nova-operator-controller-manager-79d658b66d-9kcnz" Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.885612 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-59wlv\" (UniqueName: \"kubernetes.io/projected/cfd2f621-2f11-4b93-8b02-1ed72c06bb11-kube-api-access-59wlv\") pod \"neutron-operator-controller-manager-58879495c-mfwwx\" (UID: \"cfd2f621-2f11-4b93-8b02-1ed72c06bb11\") " pod="openstack-operators/neutron-operator-controller-manager-58879495c-mfwwx" Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.885643 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-56plt\" (UniqueName: \"kubernetes.io/projected/26894e71-7711-47cc-afe2-44f1d0657000-kube-api-access-56plt\") pod \"mariadb-operator-controller-manager-64d7c556cd-9xzl9\" (UID: \"26894e71-7711-47cc-afe2-44f1d0657000\") " pod="openstack-operators/mariadb-operator-controller-manager-64d7c556cd-9xzl9" Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.885712 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x2jcr\" (UniqueName: \"kubernetes.io/projected/954cafe5-7d98-4ccd-ad79-2c928fa65dc1-kube-api-access-x2jcr\") pod \"octavia-operator-controller-manager-d5fb87cb8-5mqm7\" (UID: 
\"954cafe5-7d98-4ccd-ad79-2c928fa65dc1\") " pod="openstack-operators/octavia-operator-controller-manager-d5fb87cb8-5mqm7" Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.891163 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-5bfbbb859d-blc2l" Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.900109 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-54485f899-6ccdh" Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.900652 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-d5fb87cb8-5mqm7"] Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.920146 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-56plt\" (UniqueName: \"kubernetes.io/projected/26894e71-7711-47cc-afe2-44f1d0657000-kube-api-access-56plt\") pod \"mariadb-operator-controller-manager-64d7c556cd-9xzl9\" (UID: \"26894e71-7711-47cc-afe2-44f1d0657000\") " pod="openstack-operators/mariadb-operator-controller-manager-64d7c556cd-9xzl9" Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.920208 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-77868f484-qs7zf"] Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.922105 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-75f975cf8c-wb8p4" Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.933629 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/placement-operator-controller-manager-867d87977b-v59rf"] Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.934674 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-77868f484-qs7zf" Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.935187 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-867d87977b-v59rf" Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.940065 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-6788cc6d75-8hczh" Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.951018 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ovn-operator-controller-manager-5b67cfc8fb-8fhvk"] Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.952156 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-5b67cfc8fb-8fhvk" Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.956903 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-webhook-server-cert" Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.957239 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"placement-operator-controller-manager-dockercfg-s7ww2" Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.957452 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ovn-operator-controller-manager-dockercfg-hqmjm" Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.957677 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-t2tw2" Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.960333 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-867d87977b-v59rf"] Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.976253 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-85fbd69fcd-98cmt" Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.983275 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-79cc9d59f5-tnnbf" Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.986584 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/bf9b1f68-efa4-4a9e-bcd3-6bcbad0dbd9a-cert\") pod \"openstack-baremetal-operator-controller-manager-77868f484-qs7zf\" (UID: \"bf9b1f68-efa4-4a9e-bcd3-6bcbad0dbd9a\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-77868f484-qs7zf" Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.986626 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nwck2\" (UniqueName: \"kubernetes.io/projected/714804fd-d184-418f-a12f-efaec040cef1-kube-api-access-nwck2\") pod \"nova-operator-controller-manager-79d658b66d-9kcnz\" (UID: \"714804fd-d184-418f-a12f-efaec040cef1\") " pod="openstack-operators/nova-operator-controller-manager-79d658b66d-9kcnz" Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.986646 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-59wlv\" (UniqueName: \"kubernetes.io/projected/cfd2f621-2f11-4b93-8b02-1ed72c06bb11-kube-api-access-59wlv\") pod \"neutron-operator-controller-manager-58879495c-mfwwx\" (UID: \"cfd2f621-2f11-4b93-8b02-1ed72c06bb11\") " pod="openstack-operators/neutron-operator-controller-manager-58879495c-mfwwx" Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.986663 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sjcrz\" (UniqueName: \"kubernetes.io/projected/10bd6456-bd70-4e1a-a6e0-5eff23c9fec6-kube-api-access-sjcrz\") pod \"placement-operator-controller-manager-867d87977b-v59rf\" (UID: \"10bd6456-bd70-4e1a-a6e0-5eff23c9fec6\") " pod="openstack-operators/placement-operator-controller-manager-867d87977b-v59rf" Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.986688 5070 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b2kk6\" (UniqueName: \"kubernetes.io/projected/bf9b1f68-efa4-4a9e-bcd3-6bcbad0dbd9a-kube-api-access-b2kk6\") pod \"openstack-baremetal-operator-controller-manager-77868f484-qs7zf\" (UID: \"bf9b1f68-efa4-4a9e-bcd3-6bcbad0dbd9a\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-77868f484-qs7zf" Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.986727 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w2b8f\" (UniqueName: \"kubernetes.io/projected/a4e6faf7-1a69-45ea-ab85-23acffcd5cf0-kube-api-access-w2b8f\") pod \"ovn-operator-controller-manager-5b67cfc8fb-8fhvk\" (UID: \"a4e6faf7-1a69-45ea-ab85-23acffcd5cf0\") " pod="openstack-operators/ovn-operator-controller-manager-5b67cfc8fb-8fhvk" Dec 13 03:26:35 crc kubenswrapper[5070]: I1213 03:26:35.986750 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x2jcr\" (UniqueName: \"kubernetes.io/projected/954cafe5-7d98-4ccd-ad79-2c928fa65dc1-kube-api-access-x2jcr\") pod \"octavia-operator-controller-manager-d5fb87cb8-5mqm7\" (UID: \"954cafe5-7d98-4ccd-ad79-2c928fa65dc1\") " pod="openstack-operators/octavia-operator-controller-manager-d5fb87cb8-5mqm7" Dec 13 03:26:36 crc kubenswrapper[5070]: I1213 03:26:36.016952 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-698d6fd7d6-nm89q" Dec 13 03:26:36 crc kubenswrapper[5070]: I1213 03:26:36.021458 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-59wlv\" (UniqueName: \"kubernetes.io/projected/cfd2f621-2f11-4b93-8b02-1ed72c06bb11-kube-api-access-59wlv\") pod \"neutron-operator-controller-manager-58879495c-mfwwx\" (UID: \"cfd2f621-2f11-4b93-8b02-1ed72c06bb11\") " pod="openstack-operators/neutron-operator-controller-manager-58879495c-mfwwx" Dec 13 03:26:36 crc kubenswrapper[5070]: I1213 03:26:36.034367 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x2jcr\" (UniqueName: \"kubernetes.io/projected/954cafe5-7d98-4ccd-ad79-2c928fa65dc1-kube-api-access-x2jcr\") pod \"octavia-operator-controller-manager-d5fb87cb8-5mqm7\" (UID: \"954cafe5-7d98-4ccd-ad79-2c928fa65dc1\") " pod="openstack-operators/octavia-operator-controller-manager-d5fb87cb8-5mqm7" Dec 13 03:26:36 crc kubenswrapper[5070]: I1213 03:26:36.043040 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nwck2\" (UniqueName: \"kubernetes.io/projected/714804fd-d184-418f-a12f-efaec040cef1-kube-api-access-nwck2\") pod \"nova-operator-controller-manager-79d658b66d-9kcnz\" (UID: \"714804fd-d184-418f-a12f-efaec040cef1\") " pod="openstack-operators/nova-operator-controller-manager-79d658b66d-9kcnz" Dec 13 03:26:36 crc kubenswrapper[5070]: I1213 03:26:36.050401 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-5b67cfc8fb-8fhvk"] Dec 13 03:26:36 crc kubenswrapper[5070]: I1213 03:26:36.055784 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-77868f484-qs7zf"] Dec 13 03:26:36 crc kubenswrapper[5070]: I1213 03:26:36.056702 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-7d5d9fd47f-lt4rh" Dec 13 03:26:36 crc kubenswrapper[5070]: I1213 03:26:36.057304 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-64d7c556cd-9xzl9" Dec 13 03:26:36 crc kubenswrapper[5070]: I1213 03:26:36.058291 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-controller-manager-8f6687c44-lcwtx"] Dec 13 03:26:36 crc kubenswrapper[5070]: I1213 03:26:36.059918 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-8f6687c44-lcwtx" Dec 13 03:26:36 crc kubenswrapper[5070]: I1213 03:26:36.069052 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-controller-manager-dockercfg-wfcc8" Dec 13 03:26:36 crc kubenswrapper[5070]: I1213 03:26:36.069192 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-695797c565-nh8lk"] Dec 13 03:26:36 crc kubenswrapper[5070]: I1213 03:26:36.073849 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-695797c565-nh8lk" Dec 13 03:26:36 crc kubenswrapper[5070]: I1213 03:26:36.077467 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"telemetry-operator-controller-manager-dockercfg-wcrzg" Dec 13 03:26:36 crc kubenswrapper[5070]: I1213 03:26:36.088639 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gxxzr\" (UniqueName: \"kubernetes.io/projected/515c6c6d-7b89-4ee4-a1eb-d6ed51834050-kube-api-access-gxxzr\") pod \"swift-operator-controller-manager-8f6687c44-lcwtx\" (UID: \"515c6c6d-7b89-4ee4-a1eb-d6ed51834050\") " pod="openstack-operators/swift-operator-controller-manager-8f6687c44-lcwtx" Dec 13 03:26:36 crc kubenswrapper[5070]: I1213 03:26:36.088703 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/bf9b1f68-efa4-4a9e-bcd3-6bcbad0dbd9a-cert\") pod \"openstack-baremetal-operator-controller-manager-77868f484-qs7zf\" (UID: \"bf9b1f68-efa4-4a9e-bcd3-6bcbad0dbd9a\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-77868f484-qs7zf" Dec 13 03:26:36 crc kubenswrapper[5070]: I1213 03:26:36.088722 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fwfz2\" (UniqueName: \"kubernetes.io/projected/42bdbde2-ee6a-4260-8088-4298757880e1-kube-api-access-fwfz2\") pod \"telemetry-operator-controller-manager-695797c565-nh8lk\" (UID: \"42bdbde2-ee6a-4260-8088-4298757880e1\") " pod="openstack-operators/telemetry-operator-controller-manager-695797c565-nh8lk" Dec 13 03:26:36 crc kubenswrapper[5070]: I1213 03:26:36.088757 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sjcrz\" (UniqueName: \"kubernetes.io/projected/10bd6456-bd70-4e1a-a6e0-5eff23c9fec6-kube-api-access-sjcrz\") pod \"placement-operator-controller-manager-867d87977b-v59rf\" (UID: \"10bd6456-bd70-4e1a-a6e0-5eff23c9fec6\") " pod="openstack-operators/placement-operator-controller-manager-867d87977b-v59rf" Dec 13 03:26:36 crc kubenswrapper[5070]: I1213 03:26:36.088784 5070 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"kube-api-access-b2kk6\" (UniqueName: \"kubernetes.io/projected/bf9b1f68-efa4-4a9e-bcd3-6bcbad0dbd9a-kube-api-access-b2kk6\") pod \"openstack-baremetal-operator-controller-manager-77868f484-qs7zf\" (UID: \"bf9b1f68-efa4-4a9e-bcd3-6bcbad0dbd9a\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-77868f484-qs7zf" Dec 13 03:26:36 crc kubenswrapper[5070]: I1213 03:26:36.088824 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w2b8f\" (UniqueName: \"kubernetes.io/projected/a4e6faf7-1a69-45ea-ab85-23acffcd5cf0-kube-api-access-w2b8f\") pod \"ovn-operator-controller-manager-5b67cfc8fb-8fhvk\" (UID: \"a4e6faf7-1a69-45ea-ab85-23acffcd5cf0\") " pod="openstack-operators/ovn-operator-controller-manager-5b67cfc8fb-8fhvk" Dec 13 03:26:36 crc kubenswrapper[5070]: E1213 03:26:36.089156 5070 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 13 03:26:36 crc kubenswrapper[5070]: E1213 03:26:36.090948 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/bf9b1f68-efa4-4a9e-bcd3-6bcbad0dbd9a-cert podName:bf9b1f68-efa4-4a9e-bcd3-6bcbad0dbd9a nodeName:}" failed. No retries permitted until 2025-12-13 03:26:36.590905952 +0000 UTC m=+888.826749528 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/bf9b1f68-efa4-4a9e-bcd3-6bcbad0dbd9a-cert") pod "openstack-baremetal-operator-controller-manager-77868f484-qs7zf" (UID: "bf9b1f68-efa4-4a9e-bcd3-6bcbad0dbd9a") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 13 03:26:36 crc kubenswrapper[5070]: I1213 03:26:36.116106 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/test-operator-controller-manager-bb86466d8-qhqhn"] Dec 13 03:26:36 crc kubenswrapper[5070]: I1213 03:26:36.117159 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/test-operator-controller-manager-bb86466d8-qhqhn" Dec 13 03:26:36 crc kubenswrapper[5070]: I1213 03:26:36.119795 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"test-operator-controller-manager-dockercfg-rz9m2" Dec 13 03:26:36 crc kubenswrapper[5070]: I1213 03:26:36.123806 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b2kk6\" (UniqueName: \"kubernetes.io/projected/bf9b1f68-efa4-4a9e-bcd3-6bcbad0dbd9a-kube-api-access-b2kk6\") pod \"openstack-baremetal-operator-controller-manager-77868f484-qs7zf\" (UID: \"bf9b1f68-efa4-4a9e-bcd3-6bcbad0dbd9a\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-77868f484-qs7zf" Dec 13 03:26:36 crc kubenswrapper[5070]: I1213 03:26:36.133973 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sjcrz\" (UniqueName: \"kubernetes.io/projected/10bd6456-bd70-4e1a-a6e0-5eff23c9fec6-kube-api-access-sjcrz\") pod \"placement-operator-controller-manager-867d87977b-v59rf\" (UID: \"10bd6456-bd70-4e1a-a6e0-5eff23c9fec6\") " pod="openstack-operators/placement-operator-controller-manager-867d87977b-v59rf" Dec 13 03:26:36 crc kubenswrapper[5070]: I1213 03:26:36.134506 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w2b8f\" (UniqueName: \"kubernetes.io/projected/a4e6faf7-1a69-45ea-ab85-23acffcd5cf0-kube-api-access-w2b8f\") pod \"ovn-operator-controller-manager-5b67cfc8fb-8fhvk\" (UID: \"a4e6faf7-1a69-45ea-ab85-23acffcd5cf0\") " pod="openstack-operators/ovn-operator-controller-manager-5b67cfc8fb-8fhvk" Dec 13 03:26:36 crc kubenswrapper[5070]: I1213 03:26:36.139505 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/watcher-operator-controller-manager-6b56b8849f-8svhb"] Dec 13 03:26:36 crc kubenswrapper[5070]: I1213 03:26:36.142382 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-6b56b8849f-8svhb" Dec 13 03:26:36 crc kubenswrapper[5070]: I1213 03:26:36.145823 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"watcher-operator-controller-manager-dockercfg-bzmjd" Dec 13 03:26:36 crc kubenswrapper[5070]: I1213 03:26:36.190056 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-58879495c-mfwwx" Dec 13 03:26:36 crc kubenswrapper[5070]: I1213 03:26:36.190914 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gxxzr\" (UniqueName: \"kubernetes.io/projected/515c6c6d-7b89-4ee4-a1eb-d6ed51834050-kube-api-access-gxxzr\") pod \"swift-operator-controller-manager-8f6687c44-lcwtx\" (UID: \"515c6c6d-7b89-4ee4-a1eb-d6ed51834050\") " pod="openstack-operators/swift-operator-controller-manager-8f6687c44-lcwtx" Dec 13 03:26:36 crc kubenswrapper[5070]: I1213 03:26:36.190965 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/4fadcc6f-e200-444e-b1d4-e195467c129d-cert\") pod \"infra-operator-controller-manager-85d55b5858-xjz7n\" (UID: \"4fadcc6f-e200-444e-b1d4-e195467c129d\") " pod="openstack-operators/infra-operator-controller-manager-85d55b5858-xjz7n" Dec 13 03:26:36 crc kubenswrapper[5070]: I1213 03:26:36.190994 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fwfz2\" (UniqueName: \"kubernetes.io/projected/42bdbde2-ee6a-4260-8088-4298757880e1-kube-api-access-fwfz2\") pod \"telemetry-operator-controller-manager-695797c565-nh8lk\" (UID: \"42bdbde2-ee6a-4260-8088-4298757880e1\") " pod="openstack-operators/telemetry-operator-controller-manager-695797c565-nh8lk" Dec 13 03:26:36 crc kubenswrapper[5070]: I1213 03:26:36.215532 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/4fadcc6f-e200-444e-b1d4-e195467c129d-cert\") pod \"infra-operator-controller-manager-85d55b5858-xjz7n\" (UID: \"4fadcc6f-e200-444e-b1d4-e195467c129d\") " pod="openstack-operators/infra-operator-controller-manager-85d55b5858-xjz7n" Dec 13 03:26:36 crc kubenswrapper[5070]: I1213 03:26:36.216015 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-79d658b66d-9kcnz" Dec 13 03:26:36 crc kubenswrapper[5070]: I1213 03:26:36.228897 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-d5fb87cb8-5mqm7" Dec 13 03:26:36 crc kubenswrapper[5070]: I1213 03:26:36.263009 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gxxzr\" (UniqueName: \"kubernetes.io/projected/515c6c6d-7b89-4ee4-a1eb-d6ed51834050-kube-api-access-gxxzr\") pod \"swift-operator-controller-manager-8f6687c44-lcwtx\" (UID: \"515c6c6d-7b89-4ee4-a1eb-d6ed51834050\") " pod="openstack-operators/swift-operator-controller-manager-8f6687c44-lcwtx" Dec 13 03:26:36 crc kubenswrapper[5070]: I1213 03:26:36.263671 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fwfz2\" (UniqueName: \"kubernetes.io/projected/42bdbde2-ee6a-4260-8088-4298757880e1-kube-api-access-fwfz2\") pod \"telemetry-operator-controller-manager-695797c565-nh8lk\" (UID: \"42bdbde2-ee6a-4260-8088-4298757880e1\") " pod="openstack-operators/telemetry-operator-controller-manager-695797c565-nh8lk" Dec 13 03:26:36 crc kubenswrapper[5070]: I1213 03:26:36.265293 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-6b56b8849f-8svhb"] Dec 13 03:26:36 crc kubenswrapper[5070]: I1213 03:26:36.265375 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-bb86466d8-qhqhn"] Dec 13 03:26:36 crc kubenswrapper[5070]: I1213 03:26:36.265388 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-8f6687c44-lcwtx"] Dec 13 03:26:36 crc kubenswrapper[5070]: I1213 03:26:36.278979 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-695797c565-nh8lk"] Dec 13 03:26:36 crc kubenswrapper[5070]: I1213 03:26:36.295901 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ctxvt\" (UniqueName: \"kubernetes.io/projected/875771e8-6d22-42d9-89bc-614fdd9e41fa-kube-api-access-ctxvt\") pod \"watcher-operator-controller-manager-6b56b8849f-8svhb\" (UID: \"875771e8-6d22-42d9-89bc-614fdd9e41fa\") " pod="openstack-operators/watcher-operator-controller-manager-6b56b8849f-8svhb" Dec 13 03:26:36 crc kubenswrapper[5070]: I1213 03:26:36.301262 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7v9ht\" (UniqueName: \"kubernetes.io/projected/69dbad02-02d2-4a9c-befc-bf082990eca7-kube-api-access-7v9ht\") pod \"test-operator-controller-manager-bb86466d8-qhqhn\" (UID: \"69dbad02-02d2-4a9c-befc-bf082990eca7\") " pod="openstack-operators/test-operator-controller-manager-bb86466d8-qhqhn" Dec 13 03:26:36 crc kubenswrapper[5070]: I1213 03:26:36.334902 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-867d87977b-v59rf" Dec 13 03:26:36 crc kubenswrapper[5070]: I1213 03:26:36.364057 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-5b67cfc8fb-8fhvk" Dec 13 03:26:36 crc kubenswrapper[5070]: I1213 03:26:36.374255 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-manager-67ccbf7849-8gcsf"] Dec 13 03:26:36 crc kubenswrapper[5070]: I1213 03:26:36.375465 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-67ccbf7849-8gcsf" Dec 13 03:26:36 crc kubenswrapper[5070]: I1213 03:26:36.386941 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert" Dec 13 03:26:36 crc kubenswrapper[5070]: I1213 03:26:36.386949 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-5vxcb" Dec 13 03:26:36 crc kubenswrapper[5070]: I1213 03:26:36.388762 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-67ccbf7849-8gcsf"] Dec 13 03:26:36 crc kubenswrapper[5070]: I1213 03:26:36.398717 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-2c52x"] Dec 13 03:26:36 crc kubenswrapper[5070]: I1213 03:26:36.405506 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-8f6687c44-lcwtx" Dec 13 03:26:36 crc kubenswrapper[5070]: I1213 03:26:36.406104 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t4n79\" (UniqueName: \"kubernetes.io/projected/3728efb1-b3b6-4c34-b375-6f3feb0b26a7-kube-api-access-t4n79\") pod \"openstack-operator-controller-manager-67ccbf7849-8gcsf\" (UID: \"3728efb1-b3b6-4c34-b375-6f3feb0b26a7\") " pod="openstack-operators/openstack-operator-controller-manager-67ccbf7849-8gcsf" Dec 13 03:26:36 crc kubenswrapper[5070]: I1213 03:26:36.406126 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7v9ht\" (UniqueName: \"kubernetes.io/projected/69dbad02-02d2-4a9c-befc-bf082990eca7-kube-api-access-7v9ht\") pod \"test-operator-controller-manager-bb86466d8-qhqhn\" (UID: \"69dbad02-02d2-4a9c-befc-bf082990eca7\") " pod="openstack-operators/test-operator-controller-manager-bb86466d8-qhqhn" Dec 13 03:26:36 crc kubenswrapper[5070]: I1213 03:26:36.406152 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ctxvt\" (UniqueName: \"kubernetes.io/projected/875771e8-6d22-42d9-89bc-614fdd9e41fa-kube-api-access-ctxvt\") pod \"watcher-operator-controller-manager-6b56b8849f-8svhb\" (UID: \"875771e8-6d22-42d9-89bc-614fdd9e41fa\") " pod="openstack-operators/watcher-operator-controller-manager-6b56b8849f-8svhb" Dec 13 03:26:36 crc kubenswrapper[5070]: I1213 03:26:36.406218 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/3728efb1-b3b6-4c34-b375-6f3feb0b26a7-cert\") pod \"openstack-operator-controller-manager-67ccbf7849-8gcsf\" (UID: \"3728efb1-b3b6-4c34-b375-6f3feb0b26a7\") " pod="openstack-operators/openstack-operator-controller-manager-67ccbf7849-8gcsf" Dec 13 03:26:36 crc kubenswrapper[5070]: I1213 03:26:36.411639 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-2c52x"] Dec 13 03:26:36 crc kubenswrapper[5070]: I1213 03:26:36.411734 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-2c52x" Dec 13 03:26:36 crc kubenswrapper[5070]: I1213 03:26:36.420064 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-controller-manager-dockercfg-bgp6r" Dec 13 03:26:36 crc kubenswrapper[5070]: I1213 03:26:36.433658 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-85d55b5858-xjz7n" Dec 13 03:26:36 crc kubenswrapper[5070]: I1213 03:26:36.434092 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-695797c565-nh8lk" Dec 13 03:26:36 crc kubenswrapper[5070]: I1213 03:26:36.439587 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ctxvt\" (UniqueName: \"kubernetes.io/projected/875771e8-6d22-42d9-89bc-614fdd9e41fa-kube-api-access-ctxvt\") pod \"watcher-operator-controller-manager-6b56b8849f-8svhb\" (UID: \"875771e8-6d22-42d9-89bc-614fdd9e41fa\") " pod="openstack-operators/watcher-operator-controller-manager-6b56b8849f-8svhb" Dec 13 03:26:36 crc kubenswrapper[5070]: I1213 03:26:36.440022 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7v9ht\" (UniqueName: \"kubernetes.io/projected/69dbad02-02d2-4a9c-befc-bf082990eca7-kube-api-access-7v9ht\") pod \"test-operator-controller-manager-bb86466d8-qhqhn\" (UID: \"69dbad02-02d2-4a9c-befc-bf082990eca7\") " pod="openstack-operators/test-operator-controller-manager-bb86466d8-qhqhn" Dec 13 03:26:36 crc kubenswrapper[5070]: I1213 03:26:36.469689 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-bb86466d8-qhqhn" Dec 13 03:26:36 crc kubenswrapper[5070]: I1213 03:26:36.510168 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/3728efb1-b3b6-4c34-b375-6f3feb0b26a7-cert\") pod \"openstack-operator-controller-manager-67ccbf7849-8gcsf\" (UID: \"3728efb1-b3b6-4c34-b375-6f3feb0b26a7\") " pod="openstack-operators/openstack-operator-controller-manager-67ccbf7849-8gcsf" Dec 13 03:26:36 crc kubenswrapper[5070]: I1213 03:26:36.510233 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t4n79\" (UniqueName: \"kubernetes.io/projected/3728efb1-b3b6-4c34-b375-6f3feb0b26a7-kube-api-access-t4n79\") pod \"openstack-operator-controller-manager-67ccbf7849-8gcsf\" (UID: \"3728efb1-b3b6-4c34-b375-6f3feb0b26a7\") " pod="openstack-operators/openstack-operator-controller-manager-67ccbf7849-8gcsf" Dec 13 03:26:36 crc kubenswrapper[5070]: I1213 03:26:36.514551 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-6b56b8849f-8svhb" Dec 13 03:26:36 crc kubenswrapper[5070]: I1213 03:26:36.514771 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/3728efb1-b3b6-4c34-b375-6f3feb0b26a7-cert\") pod \"openstack-operator-controller-manager-67ccbf7849-8gcsf\" (UID: \"3728efb1-b3b6-4c34-b375-6f3feb0b26a7\") " pod="openstack-operators/openstack-operator-controller-manager-67ccbf7849-8gcsf" Dec 13 03:26:36 crc kubenswrapper[5070]: I1213 03:26:36.527078 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t4n79\" (UniqueName: \"kubernetes.io/projected/3728efb1-b3b6-4c34-b375-6f3feb0b26a7-kube-api-access-t4n79\") pod \"openstack-operator-controller-manager-67ccbf7849-8gcsf\" (UID: \"3728efb1-b3b6-4c34-b375-6f3feb0b26a7\") " pod="openstack-operators/openstack-operator-controller-manager-67ccbf7849-8gcsf" Dec 13 03:26:36 crc kubenswrapper[5070]: I1213 03:26:36.611331 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/bf9b1f68-efa4-4a9e-bcd3-6bcbad0dbd9a-cert\") pod \"openstack-baremetal-operator-controller-manager-77868f484-qs7zf\" (UID: \"bf9b1f68-efa4-4a9e-bcd3-6bcbad0dbd9a\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-77868f484-qs7zf" Dec 13 03:26:36 crc kubenswrapper[5070]: I1213 03:26:36.611424 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-psq8q\" (UniqueName: \"kubernetes.io/projected/890ff40a-bb3f-435f-b823-a4e93bf712c0-kube-api-access-psq8q\") pod \"rabbitmq-cluster-operator-manager-5f97d8c699-2c52x\" (UID: \"890ff40a-bb3f-435f-b823-a4e93bf712c0\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-2c52x" Dec 13 03:26:36 crc kubenswrapper[5070]: E1213 03:26:36.611584 5070 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 13 03:26:36 crc kubenswrapper[5070]: E1213 03:26:36.612083 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/bf9b1f68-efa4-4a9e-bcd3-6bcbad0dbd9a-cert podName:bf9b1f68-efa4-4a9e-bcd3-6bcbad0dbd9a nodeName:}" failed. No retries permitted until 2025-12-13 03:26:37.612041385 +0000 UTC m=+889.847884941 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/bf9b1f68-efa4-4a9e-bcd3-6bcbad0dbd9a-cert") pod "openstack-baremetal-operator-controller-manager-77868f484-qs7zf" (UID: "bf9b1f68-efa4-4a9e-bcd3-6bcbad0dbd9a") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 13 03:26:36 crc kubenswrapper[5070]: I1213 03:26:36.702900 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-67ccbf7849-8gcsf" Dec 13 03:26:36 crc kubenswrapper[5070]: I1213 03:26:36.708156 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-5cbc8c7f96-889ct"] Dec 13 03:26:36 crc kubenswrapper[5070]: I1213 03:26:36.714728 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-psq8q\" (UniqueName: \"kubernetes.io/projected/890ff40a-bb3f-435f-b823-a4e93bf712c0-kube-api-access-psq8q\") pod \"rabbitmq-cluster-operator-manager-5f97d8c699-2c52x\" (UID: \"890ff40a-bb3f-435f-b823-a4e93bf712c0\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-2c52x" Dec 13 03:26:36 crc kubenswrapper[5070]: I1213 03:26:36.730952 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-5bfbbb859d-blc2l"] Dec 13 03:26:36 crc kubenswrapper[5070]: I1213 03:26:36.743493 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-psq8q\" (UniqueName: \"kubernetes.io/projected/890ff40a-bb3f-435f-b823-a4e93bf712c0-kube-api-access-psq8q\") pod \"rabbitmq-cluster-operator-manager-5f97d8c699-2c52x\" (UID: \"890ff40a-bb3f-435f-b823-a4e93bf712c0\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-2c52x" Dec 13 03:26:36 crc kubenswrapper[5070]: I1213 03:26:36.760172 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-2c52x" Dec 13 03:26:36 crc kubenswrapper[5070]: W1213 03:26:36.784196 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod66c61c3a_a7b0_4a4a_b086_9c1531a9b165.slice/crio-2cd5f8dbce6c1d1ac3d8f6aa8b9d012ec7971ad0665da6fffba0f3d919e4dbdd WatchSource:0}: Error finding container 2cd5f8dbce6c1d1ac3d8f6aa8b9d012ec7971ad0665da6fffba0f3d919e4dbdd: Status 404 returned error can't find the container with id 2cd5f8dbce6c1d1ac3d8f6aa8b9d012ec7971ad0665da6fffba0f3d919e4dbdd Dec 13 03:26:36 crc kubenswrapper[5070]: I1213 03:26:36.819735 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-5cbc8c7f96-889ct" event={"ID":"66c61c3a-a7b0-4a4a-b086-9c1531a9b165","Type":"ContainerStarted","Data":"2cd5f8dbce6c1d1ac3d8f6aa8b9d012ec7971ad0665da6fffba0f3d919e4dbdd"} Dec 13 03:26:36 crc kubenswrapper[5070]: I1213 03:26:36.821901 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-5bfbbb859d-blc2l" event={"ID":"460be9d3-0ac8-4080-ba44-48db8452a323","Type":"ContainerStarted","Data":"0703dbbbcbf322ca3a8e1b5a7a79f8fd81bcfb25cbc5e26c6aec46f1e318fa60"} Dec 13 03:26:36 crc kubenswrapper[5070]: I1213 03:26:36.860710 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-54485f899-6ccdh"] Dec 13 03:26:36 crc kubenswrapper[5070]: W1213 03:26:36.894615 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podeac9d917_6a87_4f79_9758_15984dd71e23.slice/crio-05c6f6b5c907c03f9a01858669c4bc161fb609075aa095a4c41e6d885c3f0124 WatchSource:0}: Error finding container 05c6f6b5c907c03f9a01858669c4bc161fb609075aa095a4c41e6d885c3f0124: Status 404 returned error can't find the container with id 
05c6f6b5c907c03f9a01858669c4bc161fb609075aa095a4c41e6d885c3f0124 Dec 13 03:26:37 crc kubenswrapper[5070]: I1213 03:26:37.037156 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-75f975cf8c-wb8p4"] Dec 13 03:26:37 crc kubenswrapper[5070]: W1213 03:26:37.042227 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddf0aabbc_0421_41d7_ac6b_540a4bd7121e.slice/crio-04610e5172a6c9eb5875aeee59ef8e75bd467d963eea2957599243c644759a58 WatchSource:0}: Error finding container 04610e5172a6c9eb5875aeee59ef8e75bd467d963eea2957599243c644759a58: Status 404 returned error can't find the container with id 04610e5172a6c9eb5875aeee59ef8e75bd467d963eea2957599243c644759a58 Dec 13 03:26:37 crc kubenswrapper[5070]: I1213 03:26:37.060179 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-6788cc6d75-8hczh"] Dec 13 03:26:37 crc kubenswrapper[5070]: W1213 03:26:37.062240 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5848798f_7e27_4b39_b60d_84edb77c765d.slice/crio-58feaffca48c18c0460cd420c189dbc74140c5666fbf8ef7c3148729d841973a WatchSource:0}: Error finding container 58feaffca48c18c0460cd420c189dbc74140c5666fbf8ef7c3148729d841973a: Status 404 returned error can't find the container with id 58feaffca48c18c0460cd420c189dbc74140c5666fbf8ef7c3148729d841973a Dec 13 03:26:37 crc kubenswrapper[5070]: I1213 03:26:37.065780 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-85fbd69fcd-98cmt"] Dec 13 03:26:37 crc kubenswrapper[5070]: I1213 03:26:37.525262 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-8f6687c44-lcwtx"] Dec 13 03:26:37 crc kubenswrapper[5070]: I1213 03:26:37.534500 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-85d55b5858-xjz7n"] Dec 13 03:26:37 crc kubenswrapper[5070]: I1213 03:26:37.543117 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-79d658b66d-9kcnz"] Dec 13 03:26:37 crc kubenswrapper[5070]: I1213 03:26:37.555775 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-695797c565-nh8lk"] Dec 13 03:26:37 crc kubenswrapper[5070]: I1213 03:26:37.562698 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-867d87977b-v59rf"] Dec 13 03:26:37 crc kubenswrapper[5070]: I1213 03:26:37.569594 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-5b67cfc8fb-8fhvk"] Dec 13 03:26:37 crc kubenswrapper[5070]: W1213 03:26:37.579400 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod42bdbde2_ee6a_4260_8088_4298757880e1.slice/crio-833ffcb2146a02c205e79781c1d9d3a403e6027ae8db3a2748d550e7b1724dfc WatchSource:0}: Error finding container 833ffcb2146a02c205e79781c1d9d3a403e6027ae8db3a2748d550e7b1724dfc: Status 404 returned error can't find the container with id 833ffcb2146a02c205e79781c1d9d3a403e6027ae8db3a2748d550e7b1724dfc Dec 13 03:26:37 crc kubenswrapper[5070]: I1213 03:26:37.583860 5070 kubelet.go:2428] "SyncLoop 
UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-7d5d9fd47f-lt4rh"] Dec 13 03:26:37 crc kubenswrapper[5070]: I1213 03:26:37.590724 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-698d6fd7d6-nm89q"] Dec 13 03:26:37 crc kubenswrapper[5070]: I1213 03:26:37.594870 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-58879495c-mfwwx"] Dec 13 03:26:37 crc kubenswrapper[5070]: I1213 03:26:37.599281 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-bb86466d8-qhqhn"] Dec 13 03:26:37 crc kubenswrapper[5070]: I1213 03:26:37.604559 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-64d7c556cd-9xzl9"] Dec 13 03:26:37 crc kubenswrapper[5070]: W1213 03:26:37.606971 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod714804fd_d184_418f_a12f_efaec040cef1.slice/crio-fc81bfab4b031a9e63eb98d6531f3bf99ca6f37ff4cddfd80f903bb94bbca32a WatchSource:0}: Error finding container fc81bfab4b031a9e63eb98d6531f3bf99ca6f37ff4cddfd80f903bb94bbca32a: Status 404 returned error can't find the container with id fc81bfab4b031a9e63eb98d6531f3bf99ca6f37ff4cddfd80f903bb94bbca32a Dec 13 03:26:37 crc kubenswrapper[5070]: I1213 03:26:37.611395 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-6b56b8849f-8svhb"] Dec 13 03:26:37 crc kubenswrapper[5070]: W1213 03:26:37.613991 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod69dbad02_02d2_4a9c_befc_bf082990eca7.slice/crio-0e9d1020a7618fe733ee06bef7ba535720ee17a9aa2819bcf5ea5dc0e5922404 WatchSource:0}: Error finding container 0e9d1020a7618fe733ee06bef7ba535720ee17a9aa2819bcf5ea5dc0e5922404: Status 404 returned error can't find the container with id 0e9d1020a7618fe733ee06bef7ba535720ee17a9aa2819bcf5ea5dc0e5922404 Dec 13 03:26:37 crc kubenswrapper[5070]: W1213 03:26:37.619879 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcfd2f621_2f11_4b93_8b02_1ed72c06bb11.slice/crio-0c6b54b1eb4b8d4efedc1f331171eaecbe22889260cfe5b0e0dd6b15b16efc68 WatchSource:0}: Error finding container 0c6b54b1eb4b8d4efedc1f331171eaecbe22889260cfe5b0e0dd6b15b16efc68: Status 404 returned error can't find the container with id 0c6b54b1eb4b8d4efedc1f331171eaecbe22889260cfe5b0e0dd6b15b16efc68 Dec 13 03:26:37 crc kubenswrapper[5070]: W1213 03:26:37.626910 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod10bd6456_bd70_4e1a_a6e0_5eff23c9fec6.slice/crio-d78ce411853c1c177c3cd123cadc85c448140a59a764c4c35cb1aa0e5e5beeb2 WatchSource:0}: Error finding container d78ce411853c1c177c3cd123cadc85c448140a59a764c4c35cb1aa0e5e5beeb2: Status 404 returned error can't find the container with id d78ce411853c1c177c3cd123cadc85c448140a59a764c4c35cb1aa0e5e5beeb2 Dec 13 03:26:37 crc kubenswrapper[5070]: I1213 03:26:37.639206 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/bf9b1f68-efa4-4a9e-bcd3-6bcbad0dbd9a-cert\") pod \"openstack-baremetal-operator-controller-manager-77868f484-qs7zf\" (UID: 
\"bf9b1f68-efa4-4a9e-bcd3-6bcbad0dbd9a\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-77868f484-qs7zf" Dec 13 03:26:37 crc kubenswrapper[5070]: I1213 03:26:37.645161 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/bf9b1f68-efa4-4a9e-bcd3-6bcbad0dbd9a-cert\") pod \"openstack-baremetal-operator-controller-manager-77868f484-qs7zf\" (UID: \"bf9b1f68-efa4-4a9e-bcd3-6bcbad0dbd9a\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-77868f484-qs7zf" Dec 13 03:26:37 crc kubenswrapper[5070]: W1213 03:26:37.649753 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda037ecf5_dd37_4305_93a6_e28771e8df87.slice/crio-c7ee93a6ee377db65022efb33f91e91133689659f514160bad68506cf2834eb9 WatchSource:0}: Error finding container c7ee93a6ee377db65022efb33f91e91133689659f514160bad68506cf2834eb9: Status 404 returned error can't find the container with id c7ee93a6ee377db65022efb33f91e91133689659f514160bad68506cf2834eb9 Dec 13 03:26:37 crc kubenswrapper[5070]: W1213 03:26:37.651638 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod875771e8_6d22_42d9_89bc_614fdd9e41fa.slice/crio-27c69fb0445c01c38017246d92a1cb8a5ee9b1bd7ec77d7a5fc37780a183e937 WatchSource:0}: Error finding container 27c69fb0445c01c38017246d92a1cb8a5ee9b1bd7ec77d7a5fc37780a183e937: Status 404 returned error can't find the container with id 27c69fb0445c01c38017246d92a1cb8a5ee9b1bd7ec77d7a5fc37780a183e937 Dec 13 03:26:37 crc kubenswrapper[5070]: I1213 03:26:37.651769 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-79cc9d59f5-tnnbf"] Dec 13 03:26:37 crc kubenswrapper[5070]: W1213 03:26:37.651964 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod26894e71_7711_47cc_afe2_44f1d0657000.slice/crio-7c6407e4c21e6c6c6a8050dd3d272e53fb454a5fa78282d9cfc039af6cfc8b93 WatchSource:0}: Error finding container 7c6407e4c21e6c6c6a8050dd3d272e53fb454a5fa78282d9cfc039af6cfc8b93: Status 404 returned error can't find the container with id 7c6407e4c21e6c6c6a8050dd3d272e53fb454a5fa78282d9cfc039af6cfc8b93 Dec 13 03:26:37 crc kubenswrapper[5070]: E1213 03:26:37.655375 5070 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/watcher-operator@sha256:1988aaf9cd245150cda123aaaa21718ccb552c47f1623b7d68804f13c47f2c6a,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-ctxvt,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod watcher-operator-controller-manager-6b56b8849f-8svhb_openstack-operators(875771e8-6d22-42d9-89bc-614fdd9e41fa): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 13 03:26:37 crc kubenswrapper[5070]: I1213 03:26:37.655833 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-d5fb87cb8-5mqm7"] Dec 13 03:26:37 crc kubenswrapper[5070]: E1213 03:26:37.655924 5070 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/infra-operator@sha256:92b0c00727fb91c13331b1fa908252ad17e5f7f0050aee0f3cf988b5d2f61cbd,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{600 -3} {} 600m DecimalSI},memory: {{2147483648 0} {} 2Gi BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{536870912 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:cert,ReadOnly:true,MountPath:/tmp/k8s-webhook-server/serving-certs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-mtmdq,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod infra-operator-controller-manager-85d55b5858-xjz7n_openstack-operators(4fadcc6f-e200-444e-b1d4-e195467c129d): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 13 03:26:37 crc kubenswrapper[5070]: E1213 03:26:37.656035 5070 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/placement-operator@sha256:fd917de0cf800ec284ee0c3f2906a06d85ea18cb75a5b06c8eb305750467986d,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-sjcrz,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod placement-operator-controller-manager-867d87977b-v59rf_openstack-operators(10bd6456-bd70-4e1a-a6e0-5eff23c9fec6): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 13 03:26:37 crc kubenswrapper[5070]: E1213 03:26:37.657406 5070 
kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/keystone-operator@sha256:4f799c74da2f1c864af24fcd5efd91ec64848972a95246eac6b5c6c4d71c1756,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-qjn77,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod keystone-operator-controller-manager-79cc9d59f5-tnnbf_openstack-operators(a037ecf5-dd37-4305-93a6-e28771e8df87): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 13 03:26:37 crc kubenswrapper[5070]: E1213 03:26:37.660717 5070 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/mariadb-operator@sha256:2c4fe20e044dd8ea1f60f2f3f5e3844d932b4b79439835bd8771c73f16b38312,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-56plt,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod mariadb-operator-controller-manager-64d7c556cd-9xzl9_openstack-operators(26894e71-7711-47cc-afe2-44f1d0657000): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 13 03:26:37 crc kubenswrapper[5070]: I1213 03:26:37.664844 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-2c52x"] Dec 13 03:26:37 crc kubenswrapper[5070]: I1213 03:26:37.672131 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-67ccbf7849-8gcsf"] Dec 13 03:26:37 crc kubenswrapper[5070]: W1213 03:26:37.678992 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod954cafe5_7d98_4ccd_ad79_2c928fa65dc1.slice/crio-694fde3157f55309b2d7febff0fab9bf2641cc48fc1f22fe0506e9f1aef41800 WatchSource:0}: Error finding container 694fde3157f55309b2d7febff0fab9bf2641cc48fc1f22fe0506e9f1aef41800: Status 404 returned error can't find the container with id 694fde3157f55309b2d7febff0fab9bf2641cc48fc1f22fe0506e9f1aef41800 Dec 13 03:26:37 crc kubenswrapper[5070]: E1213 03:26:37.691305 5070 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:operator,Image:quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2,Command:[/manager],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:9782,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{524288000 0} {} 500Mi 
BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-psq8q,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cluster-operator-manager-5f97d8c699-2c52x_openstack-operators(890ff40a-bb3f-435f-b823-a4e93bf712c0): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 13 03:26:37 crc kubenswrapper[5070]: E1213 03:26:37.691369 5070 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/octavia-operator@sha256:5245e851b4476baecd4173eca3e8669ac09ec69d36ad1ebc3a0f867713cbc14b,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-x2jcr,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod 
octavia-operator-controller-manager-d5fb87cb8-5mqm7_openstack-operators(954cafe5-7d98-4ccd-ad79-2c928fa65dc1): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 13 03:26:37 crc kubenswrapper[5070]: E1213 03:26:37.694757 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-2c52x" podUID="890ff40a-bb3f-435f-b823-a4e93bf712c0" Dec 13 03:26:37 crc kubenswrapper[5070]: I1213 03:26:37.818556 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-77868f484-qs7zf" Dec 13 03:26:37 crc kubenswrapper[5070]: I1213 03:26:37.832642 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-58879495c-mfwwx" event={"ID":"cfd2f621-2f11-4b93-8b02-1ed72c06bb11","Type":"ContainerStarted","Data":"0c6b54b1eb4b8d4efedc1f331171eaecbe22889260cfe5b0e0dd6b15b16efc68"} Dec 13 03:26:37 crc kubenswrapper[5070]: I1213 03:26:37.836785 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-698d6fd7d6-nm89q" event={"ID":"62ea5d75-2d09-414d-b682-6ed196245ea9","Type":"ContainerStarted","Data":"10c3ce29bba33d099a75e0df54151dd86dea78d36a7ce21aea77e709a976f874"} Dec 13 03:26:37 crc kubenswrapper[5070]: I1213 03:26:37.848982 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-2c52x" event={"ID":"890ff40a-bb3f-435f-b823-a4e93bf712c0","Type":"ContainerStarted","Data":"c6a2a7d1d3f13815872ff1ff9f6a4f384f6753f982bb29aa179c6075aa1f859e"} Dec 13 03:26:37 crc kubenswrapper[5070]: I1213 03:26:37.851356 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-79cc9d59f5-tnnbf" event={"ID":"a037ecf5-dd37-4305-93a6-e28771e8df87","Type":"ContainerStarted","Data":"c7ee93a6ee377db65022efb33f91e91133689659f514160bad68506cf2834eb9"} Dec 13 03:26:37 crc kubenswrapper[5070]: I1213 03:26:37.853985 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-85d55b5858-xjz7n" event={"ID":"4fadcc6f-e200-444e-b1d4-e195467c129d","Type":"ContainerStarted","Data":"3c660c781d1f1864480e44ebb2beb67c2e28dba364be06e3eb6abb37a739013d"} Dec 13 03:26:37 crc kubenswrapper[5070]: E1213 03:26:37.855400 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-2c52x" podUID="890ff40a-bb3f-435f-b823-a4e93bf712c0" Dec 13 03:26:37 crc kubenswrapper[5070]: I1213 03:26:37.863295 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-6788cc6d75-8hczh" event={"ID":"5848798f-7e27-4b39-b60d-84edb77c765d","Type":"ContainerStarted","Data":"58feaffca48c18c0460cd420c189dbc74140c5666fbf8ef7c3148729d841973a"} Dec 13 03:26:37 crc kubenswrapper[5070]: I1213 03:26:37.866756 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-64d7c556cd-9xzl9" 
event={"ID":"26894e71-7711-47cc-afe2-44f1d0657000","Type":"ContainerStarted","Data":"7c6407e4c21e6c6c6a8050dd3d272e53fb454a5fa78282d9cfc039af6cfc8b93"} Dec 13 03:26:37 crc kubenswrapper[5070]: I1213 03:26:37.868068 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-6b56b8849f-8svhb" event={"ID":"875771e8-6d22-42d9-89bc-614fdd9e41fa","Type":"ContainerStarted","Data":"27c69fb0445c01c38017246d92a1cb8a5ee9b1bd7ec77d7a5fc37780a183e937"} Dec 13 03:26:37 crc kubenswrapper[5070]: I1213 03:26:37.869700 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-d5fb87cb8-5mqm7" event={"ID":"954cafe5-7d98-4ccd-ad79-2c928fa65dc1","Type":"ContainerStarted","Data":"694fde3157f55309b2d7febff0fab9bf2641cc48fc1f22fe0506e9f1aef41800"} Dec 13 03:26:37 crc kubenswrapper[5070]: I1213 03:26:37.871634 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-67ccbf7849-8gcsf" event={"ID":"3728efb1-b3b6-4c34-b375-6f3feb0b26a7","Type":"ContainerStarted","Data":"19df4a3e6170c4931caccec43aad7839b75868200e58c3a681d85c0c49141850"} Dec 13 03:26:37 crc kubenswrapper[5070]: I1213 03:26:37.873533 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-5b67cfc8fb-8fhvk" event={"ID":"a4e6faf7-1a69-45ea-ab85-23acffcd5cf0","Type":"ContainerStarted","Data":"16e62a08bd9b74d1afe7fc4ab35178ac61523c580fb926e401e5c2c8018ae1bd"} Dec 13 03:26:37 crc kubenswrapper[5070]: I1213 03:26:37.874802 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-8f6687c44-lcwtx" event={"ID":"515c6c6d-7b89-4ee4-a1eb-d6ed51834050","Type":"ContainerStarted","Data":"baf054ab1420180ff79a0b0bc2016e7c5ac8ae09b35246b9ea10ce1914e35bd8"} Dec 13 03:26:37 crc kubenswrapper[5070]: I1213 03:26:37.877853 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-75f975cf8c-wb8p4" event={"ID":"df0aabbc-0421-41d7-ac6b-540a4bd7121e","Type":"ContainerStarted","Data":"04610e5172a6c9eb5875aeee59ef8e75bd467d963eea2957599243c644759a58"} Dec 13 03:26:37 crc kubenswrapper[5070]: I1213 03:26:37.879110 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-85fbd69fcd-98cmt" event={"ID":"17401b15-4810-4d28-9244-f1ef166c3278","Type":"ContainerStarted","Data":"ebda90e34299582d2cd0ecd7861b8dfa1a89a368a4238851c98901c56a6874c8"} Dec 13 03:26:37 crc kubenswrapper[5070]: I1213 03:26:37.880458 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-bb86466d8-qhqhn" event={"ID":"69dbad02-02d2-4a9c-befc-bf082990eca7","Type":"ContainerStarted","Data":"0e9d1020a7618fe733ee06bef7ba535720ee17a9aa2819bcf5ea5dc0e5922404"} Dec 13 03:26:37 crc kubenswrapper[5070]: I1213 03:26:37.885461 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-695797c565-nh8lk" event={"ID":"42bdbde2-ee6a-4260-8088-4298757880e1","Type":"ContainerStarted","Data":"833ffcb2146a02c205e79781c1d9d3a403e6027ae8db3a2748d550e7b1724dfc"} Dec 13 03:26:37 crc kubenswrapper[5070]: I1213 03:26:37.887097 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-79d658b66d-9kcnz" 
event={"ID":"714804fd-d184-418f-a12f-efaec040cef1","Type":"ContainerStarted","Data":"fc81bfab4b031a9e63eb98d6531f3bf99ca6f37ff4cddfd80f903bb94bbca32a"} Dec 13 03:26:37 crc kubenswrapper[5070]: I1213 03:26:37.888862 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-867d87977b-v59rf" event={"ID":"10bd6456-bd70-4e1a-a6e0-5eff23c9fec6","Type":"ContainerStarted","Data":"d78ce411853c1c177c3cd123cadc85c448140a59a764c4c35cb1aa0e5e5beeb2"} Dec 13 03:26:37 crc kubenswrapper[5070]: I1213 03:26:37.893908 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-7d5d9fd47f-lt4rh" event={"ID":"17d2a89b-feed-4eae-bbc3-5296e677ef48","Type":"ContainerStarted","Data":"2174390bd789927386cc360c0be2f542d5a038305d89ebbbca96eacf590b10ec"} Dec 13 03:26:37 crc kubenswrapper[5070]: I1213 03:26:37.895773 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-54485f899-6ccdh" event={"ID":"eac9d917-6a87-4f79-9758-15984dd71e23","Type":"ContainerStarted","Data":"05c6f6b5c907c03f9a01858669c4bc161fb609075aa095a4c41e6d885c3f0124"} Dec 13 03:26:37 crc kubenswrapper[5070]: E1213 03:26:37.934173 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/infra-operator-controller-manager-85d55b5858-xjz7n" podUID="4fadcc6f-e200-444e-b1d4-e195467c129d" Dec 13 03:26:37 crc kubenswrapper[5070]: E1213 03:26:37.965055 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/keystone-operator-controller-manager-79cc9d59f5-tnnbf" podUID="a037ecf5-dd37-4305-93a6-e28771e8df87" Dec 13 03:26:37 crc kubenswrapper[5070]: E1213 03:26:37.993759 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/placement-operator-controller-manager-867d87977b-v59rf" podUID="10bd6456-bd70-4e1a-a6e0-5eff23c9fec6" Dec 13 03:26:38 crc kubenswrapper[5070]: E1213 03:26:38.006590 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/watcher-operator-controller-manager-6b56b8849f-8svhb" podUID="875771e8-6d22-42d9-89bc-614fdd9e41fa" Dec 13 03:26:38 crc kubenswrapper[5070]: E1213 03:26:38.027113 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/mariadb-operator-controller-manager-64d7c556cd-9xzl9" podUID="26894e71-7711-47cc-afe2-44f1d0657000" Dec 13 03:26:38 crc kubenswrapper[5070]: E1213 03:26:38.030360 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/octavia-operator-controller-manager-d5fb87cb8-5mqm7" podUID="954cafe5-7d98-4ccd-ad79-2c928fa65dc1" Dec 13 03:26:38 crc kubenswrapper[5070]: I1213 03:26:38.125125 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-77868f484-qs7zf"] Dec 13 03:26:38 crc kubenswrapper[5070]: I1213 03:26:38.911920 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/placement-operator-controller-manager-867d87977b-v59rf" event={"ID":"10bd6456-bd70-4e1a-a6e0-5eff23c9fec6","Type":"ContainerStarted","Data":"8f5f28c4dfe66e5b313b10ef7781e5db94c8f782c38c0c1de55bfbbd8c1cf2c3"} Dec 13 03:26:38 crc kubenswrapper[5070]: E1213 03:26:38.916702 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/placement-operator@sha256:fd917de0cf800ec284ee0c3f2906a06d85ea18cb75a5b06c8eb305750467986d\\\"\"" pod="openstack-operators/placement-operator-controller-manager-867d87977b-v59rf" podUID="10bd6456-bd70-4e1a-a6e0-5eff23c9fec6" Dec 13 03:26:38 crc kubenswrapper[5070]: I1213 03:26:38.937185 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-79cc9d59f5-tnnbf" event={"ID":"a037ecf5-dd37-4305-93a6-e28771e8df87","Type":"ContainerStarted","Data":"6f80ab148ab01fe350706820793dde672899846e458a9957a2b7d9fbcb0669d4"} Dec 13 03:26:38 crc kubenswrapper[5070]: E1213 03:26:38.939383 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/keystone-operator@sha256:4f799c74da2f1c864af24fcd5efd91ec64848972a95246eac6b5c6c4d71c1756\\\"\"" pod="openstack-operators/keystone-operator-controller-manager-79cc9d59f5-tnnbf" podUID="a037ecf5-dd37-4305-93a6-e28771e8df87" Dec 13 03:26:38 crc kubenswrapper[5070]: I1213 03:26:38.957103 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-67ccbf7849-8gcsf" event={"ID":"3728efb1-b3b6-4c34-b375-6f3feb0b26a7","Type":"ContainerStarted","Data":"2f7c63031420ec4335374578846b17cff2f1a35acbfe85485ace3952cec5411f"} Dec 13 03:26:38 crc kubenswrapper[5070]: I1213 03:26:38.957362 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-67ccbf7849-8gcsf" event={"ID":"3728efb1-b3b6-4c34-b375-6f3feb0b26a7","Type":"ContainerStarted","Data":"48dd83e90a3309479f111ec1abfc92b175b032d76637c12d7fc45517b24b209d"} Dec 13 03:26:38 crc kubenswrapper[5070]: I1213 03:26:38.957607 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-manager-67ccbf7849-8gcsf" Dec 13 03:26:38 crc kubenswrapper[5070]: I1213 03:26:38.960655 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-85d55b5858-xjz7n" event={"ID":"4fadcc6f-e200-444e-b1d4-e195467c129d","Type":"ContainerStarted","Data":"f22069e52eb4cdd9aa870002c96299bd208f2c29f5be5b7bab2c3a1aae310057"} Dec 13 03:26:38 crc kubenswrapper[5070]: E1213 03:26:38.978791 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/infra-operator@sha256:92b0c00727fb91c13331b1fa908252ad17e5f7f0050aee0f3cf988b5d2f61cbd\\\"\"" pod="openstack-operators/infra-operator-controller-manager-85d55b5858-xjz7n" podUID="4fadcc6f-e200-444e-b1d4-e195467c129d" Dec 13 03:26:38 crc kubenswrapper[5070]: I1213 03:26:38.996655 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-77868f484-qs7zf" 
event={"ID":"bf9b1f68-efa4-4a9e-bcd3-6bcbad0dbd9a","Type":"ContainerStarted","Data":"1a5d83dc6895ae9693ac54e6aca25ce4ef56871c92d822e41527d8609c633fbf"} Dec 13 03:26:39 crc kubenswrapper[5070]: I1213 03:26:39.021531 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-64d7c556cd-9xzl9" event={"ID":"26894e71-7711-47cc-afe2-44f1d0657000","Type":"ContainerStarted","Data":"1c5532885e6ce2453db974fa356f4b51e60af37fb958c0a857040a1ca52ad454"} Dec 13 03:26:39 crc kubenswrapper[5070]: E1213 03:26:39.050704 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/mariadb-operator@sha256:2c4fe20e044dd8ea1f60f2f3f5e3844d932b4b79439835bd8771c73f16b38312\\\"\"" pod="openstack-operators/mariadb-operator-controller-manager-64d7c556cd-9xzl9" podUID="26894e71-7711-47cc-afe2-44f1d0657000" Dec 13 03:26:39 crc kubenswrapper[5070]: I1213 03:26:39.073098 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-6b56b8849f-8svhb" event={"ID":"875771e8-6d22-42d9-89bc-614fdd9e41fa","Type":"ContainerStarted","Data":"6b14caf6470ca5bff44c70d9c14999389be3d69eeb85570e99bab0ce5055a84c"} Dec 13 03:26:39 crc kubenswrapper[5070]: E1213 03:26:39.077694 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:1988aaf9cd245150cda123aaaa21718ccb552c47f1623b7d68804f13c47f2c6a\\\"\"" pod="openstack-operators/watcher-operator-controller-manager-6b56b8849f-8svhb" podUID="875771e8-6d22-42d9-89bc-614fdd9e41fa" Dec 13 03:26:39 crc kubenswrapper[5070]: I1213 03:26:39.109925 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-d5fb87cb8-5mqm7" event={"ID":"954cafe5-7d98-4ccd-ad79-2c928fa65dc1","Type":"ContainerStarted","Data":"f448c0568b7a4b6c94d2c65dc33ae89c0f25466d5785cb685bb349e289cb4ab5"} Dec 13 03:26:39 crc kubenswrapper[5070]: E1213 03:26:39.112524 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-2c52x" podUID="890ff40a-bb3f-435f-b823-a4e93bf712c0" Dec 13 03:26:39 crc kubenswrapper[5070]: E1213 03:26:39.112634 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/octavia-operator@sha256:5245e851b4476baecd4173eca3e8669ac09ec69d36ad1ebc3a0f867713cbc14b\\\"\"" pod="openstack-operators/octavia-operator-controller-manager-d5fb87cb8-5mqm7" podUID="954cafe5-7d98-4ccd-ad79-2c928fa65dc1" Dec 13 03:26:39 crc kubenswrapper[5070]: I1213 03:26:39.384015 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-manager-67ccbf7849-8gcsf" podStartSLOduration=3.383996453 podStartE2EDuration="3.383996453s" podCreationTimestamp="2025-12-13 03:26:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" 
observedRunningTime="2025-12-13 03:26:39.355950396 +0000 UTC m=+891.591793942" watchObservedRunningTime="2025-12-13 03:26:39.383996453 +0000 UTC m=+891.619839999" Dec 13 03:26:40 crc kubenswrapper[5070]: E1213 03:26:40.130535 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/mariadb-operator@sha256:2c4fe20e044dd8ea1f60f2f3f5e3844d932b4b79439835bd8771c73f16b38312\\\"\"" pod="openstack-operators/mariadb-operator-controller-manager-64d7c556cd-9xzl9" podUID="26894e71-7711-47cc-afe2-44f1d0657000" Dec 13 03:26:40 crc kubenswrapper[5070]: E1213 03:26:40.131080 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/keystone-operator@sha256:4f799c74da2f1c864af24fcd5efd91ec64848972a95246eac6b5c6c4d71c1756\\\"\"" pod="openstack-operators/keystone-operator-controller-manager-79cc9d59f5-tnnbf" podUID="a037ecf5-dd37-4305-93a6-e28771e8df87" Dec 13 03:26:40 crc kubenswrapper[5070]: E1213 03:26:40.131114 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/placement-operator@sha256:fd917de0cf800ec284ee0c3f2906a06d85ea18cb75a5b06c8eb305750467986d\\\"\"" pod="openstack-operators/placement-operator-controller-manager-867d87977b-v59rf" podUID="10bd6456-bd70-4e1a-a6e0-5eff23c9fec6" Dec 13 03:26:40 crc kubenswrapper[5070]: E1213 03:26:40.131156 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:1988aaf9cd245150cda123aaaa21718ccb552c47f1623b7d68804f13c47f2c6a\\\"\"" pod="openstack-operators/watcher-operator-controller-manager-6b56b8849f-8svhb" podUID="875771e8-6d22-42d9-89bc-614fdd9e41fa" Dec 13 03:26:40 crc kubenswrapper[5070]: E1213 03:26:40.131230 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/octavia-operator@sha256:5245e851b4476baecd4173eca3e8669ac09ec69d36ad1ebc3a0f867713cbc14b\\\"\"" pod="openstack-operators/octavia-operator-controller-manager-d5fb87cb8-5mqm7" podUID="954cafe5-7d98-4ccd-ad79-2c928fa65dc1" Dec 13 03:26:40 crc kubenswrapper[5070]: E1213 03:26:40.132313 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/infra-operator@sha256:92b0c00727fb91c13331b1fa908252ad17e5f7f0050aee0f3cf988b5d2f61cbd\\\"\"" pod="openstack-operators/infra-operator-controller-manager-85d55b5858-xjz7n" podUID="4fadcc6f-e200-444e-b1d4-e195467c129d" Dec 13 03:26:46 crc kubenswrapper[5070]: I1213 03:26:46.709139 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-manager-67ccbf7849-8gcsf" Dec 13 03:26:50 crc kubenswrapper[5070]: I1213 03:26:50.168029 5070 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 13 03:26:52 crc kubenswrapper[5070]: E1213 03:26:52.086898 5070 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying 
config: context canceled" image="quay.io/openstack-k8s-operators/designate-operator@sha256:8aaaf8bb0a81358ee196af922d534c9b3f6bb47b27f4283087f7e0254638a671" Dec 13 03:26:52 crc kubenswrapper[5070]: E1213 03:26:52.087098 5070 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/designate-operator@sha256:8aaaf8bb0a81358ee196af922d534c9b3f6bb47b27f4283087f7e0254638a671,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-vsgd8,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod designate-operator-controller-manager-6788cc6d75-8hczh_openstack-operators(5848798f-7e27-4b39-b60d-84edb77c765d): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 13 03:26:52 crc kubenswrapper[5070]: E1213 03:26:52.661899 5070 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/barbican-operator@sha256:c5394efcfaeddc4231f98f1ed5267b77a8687038064cfb4302bcd0c8d6587856" Dec 13 03:26:52 crc kubenswrapper[5070]: E1213 03:26:52.662143 5070 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/barbican-operator@sha256:c5394efcfaeddc4231f98f1ed5267b77a8687038064cfb4302bcd0c8d6587856,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 
--leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-dwb7c,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod barbican-operator-controller-manager-5bfbbb859d-blc2l_openstack-operators(460be9d3-0ac8-4080-ba44-48db8452a323): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 13 03:26:53 crc kubenswrapper[5070]: E1213 03:26:53.159224 5070 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/ovn-operator@sha256:2c837009de6475bc22534827c03df6d8649277b71f1c30de2087b6c52aafb326" Dec 13 03:26:53 crc kubenswrapper[5070]: E1213 03:26:53.159743 5070 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/ovn-operator@sha256:2c837009de6475bc22534827c03df6d8649277b71f1c30de2087b6c52aafb326,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-w2b8f,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ovn-operator-controller-manager-5b67cfc8fb-8fhvk_openstack-operators(a4e6faf7-1a69-45ea-ab85-23acffcd5cf0): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 13 03:27:02 crc kubenswrapper[5070]: E1213 03:27:02.390405 5070 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/ironic-operator@sha256:9aee58b2ca71ef9c4f12373090951090d13aa7038d0fef07ec30167f3d6ae23c" Dec 13 03:27:02 crc kubenswrapper[5070]: E1213 03:27:02.391325 5070 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/ironic-operator@sha256:9aee58b2ca71ef9c4f12373090951090d13aa7038d0fef07ec30167f3d6ae23c,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-5zg22,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ironic-operator-controller-manager-54485f899-6ccdh_openstack-operators(eac9d917-6a87-4f79-9758-15984dd71e23): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 13 03:27:02 crc kubenswrapper[5070]: E1213 03:27:02.454505 5070 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.18:5001/openstack-k8s-operators/cinder-operator:c717723667c63a9180d3c76f68061c82fd413162" Dec 13 03:27:02 crc kubenswrapper[5070]: E1213 03:27:02.454582 5070 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.18:5001/openstack-k8s-operators/cinder-operator:c717723667c63a9180d3c76f68061c82fd413162" Dec 13 03:27:02 crc kubenswrapper[5070]: E1213 03:27:02.454811 5070 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:38.102.83.18:5001/openstack-k8s-operators/cinder-operator:c717723667c63a9180d3c76f68061c82fd413162,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-56tl6,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-operator-controller-manager-75f975cf8c-wb8p4_openstack-operators(df0aabbc-0421-41d7-ac6b-540a4bd7121e): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 13 03:27:03 crc kubenswrapper[5070]: E1213 03:27:03.542087 5070 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/manila-operator@sha256:57d9cb0034a7d5c7a39410fcb619ade2010e6855344dc3a0bc2bfd98cdf345d8" Dec 13 03:27:03 crc kubenswrapper[5070]: E1213 03:27:03.542631 5070 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/manila-operator@sha256:57d9cb0034a7d5c7a39410fcb619ade2010e6855344dc3a0bc2bfd98cdf345d8,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-rj7mm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod manila-operator-controller-manager-5cbc8c7f96-889ct_openstack-operators(66c61c3a-a7b0-4a4a-b086-9c1531a9b165): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 13 03:27:04 crc kubenswrapper[5070]: E1213 03:27:04.156145 5070 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/test-operator@sha256:49180c7bd4f0071e43ae7044260a3a97c4aa34fcbcb2d0d4573df449765ed391" Dec 13 03:27:04 crc kubenswrapper[5070]: E1213 03:27:04.156346 5070 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/test-operator@sha256:49180c7bd4f0071e43ae7044260a3a97c4aa34fcbcb2d0d4573df449765ed391,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-7v9ht,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-bb86466d8-qhqhn_openstack-operators(69dbad02-02d2-4a9c-befc-bf082990eca7): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 13 03:27:04 crc kubenswrapper[5070]: E1213 03:27:04.673600 5070 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/glance-operator@sha256:f4b6baa2b8a661351cfc24fff5aacee5aa4198106618700cfa47ec3a75f88b31" Dec 13 03:27:04 crc kubenswrapper[5070]: E1213 03:27:04.673777 5070 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/glance-operator@sha256:f4b6baa2b8a661351cfc24fff5aacee5aa4198106618700cfa47ec3a75f88b31,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-mvcvm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod glance-operator-controller-manager-85fbd69fcd-98cmt_openstack-operators(17401b15-4810-4d28-9244-f1ef166c3278): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 13 03:27:09 crc kubenswrapper[5070]: E1213 03:27:09.924290 5070 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2" Dec 13 03:27:09 crc kubenswrapper[5070]: E1213 03:27:09.924835 5070 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:operator,Image:quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2,Command:[/manager],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:9782,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-psq8q,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cluster-operator-manager-5f97d8c699-2c52x_openstack-operators(890ff40a-bb3f-435f-b823-a4e93bf712c0): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 13 03:27:09 crc 
kubenswrapper[5070]: E1213 03:27:09.926205 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-2c52x" podUID="890ff40a-bb3f-435f-b823-a4e93bf712c0" Dec 13 03:27:10 crc kubenswrapper[5070]: E1213 03:27:10.314642 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/designate-operator-controller-manager-6788cc6d75-8hczh" podUID="5848798f-7e27-4b39-b60d-84edb77c765d" Dec 13 03:27:10 crc kubenswrapper[5070]: I1213 03:27:10.338198 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-6788cc6d75-8hczh" event={"ID":"5848798f-7e27-4b39-b60d-84edb77c765d","Type":"ContainerStarted","Data":"e3039e7d43b50564f3c2ccb9b262ab3295273f4f0513d22eb9b0d42618678250"} Dec 13 03:27:10 crc kubenswrapper[5070]: E1213 03:27:10.453312 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/glance-operator-controller-manager-85fbd69fcd-98cmt" podUID="17401b15-4810-4d28-9244-f1ef166c3278" Dec 13 03:27:10 crc kubenswrapper[5070]: E1213 03:27:10.502754 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/ironic-operator-controller-manager-54485f899-6ccdh" podUID="eac9d917-6a87-4f79-9758-15984dd71e23" Dec 13 03:27:10 crc kubenswrapper[5070]: E1213 03:27:10.637654 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/manila-operator-controller-manager-5cbc8c7f96-889ct" podUID="66c61c3a-a7b0-4a4a-b086-9c1531a9b165" Dec 13 03:27:10 crc kubenswrapper[5070]: E1213 03:27:10.690313 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/cinder-operator-controller-manager-75f975cf8c-wb8p4" podUID="df0aabbc-0421-41d7-ac6b-540a4bd7121e" Dec 13 03:27:10 crc kubenswrapper[5070]: E1213 03:27:10.720696 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/ovn-operator-controller-manager-5b67cfc8fb-8fhvk" podUID="a4e6faf7-1a69-45ea-ab85-23acffcd5cf0" Dec 13 03:27:10 crc kubenswrapper[5070]: E1213 03:27:10.721708 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/barbican-operator-controller-manager-5bfbbb859d-blc2l" podUID="460be9d3-0ac8-4080-ba44-48db8452a323" Dec 13 03:27:10 crc kubenswrapper[5070]: E1213 03:27:10.722162 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to 
\"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/test-operator-controller-manager-bb86466d8-qhqhn" podUID="69dbad02-02d2-4a9c-befc-bf082990eca7" Dec 13 03:27:11 crc kubenswrapper[5070]: I1213 03:27:11.347626 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-79d658b66d-9kcnz" event={"ID":"714804fd-d184-418f-a12f-efaec040cef1","Type":"ContainerStarted","Data":"314a892e9cbe87138c5213506a9cfb7600ec3cfc18a614bccd59f5ba0ce6f50a"} Dec 13 03:27:11 crc kubenswrapper[5070]: I1213 03:27:11.347710 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-79d658b66d-9kcnz" event={"ID":"714804fd-d184-418f-a12f-efaec040cef1","Type":"ContainerStarted","Data":"7cbc1778a4bbbfc9521f90683873e304b5ac7de8b81bdaebe39bfe43a44633c1"} Dec 13 03:27:11 crc kubenswrapper[5070]: I1213 03:27:11.348724 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/nova-operator-controller-manager-79d658b66d-9kcnz" Dec 13 03:27:11 crc kubenswrapper[5070]: I1213 03:27:11.354256 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-64d7c556cd-9xzl9" event={"ID":"26894e71-7711-47cc-afe2-44f1d0657000","Type":"ContainerStarted","Data":"756fd4a3d7a574b87d5448d6be2270c325f2f2f1a4c9f8cf548b64a439ba9f06"} Dec 13 03:27:11 crc kubenswrapper[5070]: I1213 03:27:11.355093 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-64d7c556cd-9xzl9" Dec 13 03:27:11 crc kubenswrapper[5070]: I1213 03:27:11.358355 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-698d6fd7d6-nm89q" event={"ID":"62ea5d75-2d09-414d-b682-6ed196245ea9","Type":"ContainerStarted","Data":"db48ac96d44eda5d33351ad54c72884afc44fe2b7b0c5dd85bbc6621da38fbb0"} Dec 13 03:27:11 crc kubenswrapper[5070]: I1213 03:27:11.358398 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-698d6fd7d6-nm89q" event={"ID":"62ea5d75-2d09-414d-b682-6ed196245ea9","Type":"ContainerStarted","Data":"b5997683bd584575490265106277ee437acdff4d13183486f9bff206ce09187e"} Dec 13 03:27:11 crc kubenswrapper[5070]: I1213 03:27:11.358960 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/heat-operator-controller-manager-698d6fd7d6-nm89q" Dec 13 03:27:11 crc kubenswrapper[5070]: I1213 03:27:11.360217 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-58879495c-mfwwx" event={"ID":"cfd2f621-2f11-4b93-8b02-1ed72c06bb11","Type":"ContainerStarted","Data":"d93fd51c0758d49e0db139ddd845daec34a113726fe43e76ae7deb5b54fd5738"} Dec 13 03:27:11 crc kubenswrapper[5070]: I1213 03:27:11.375983 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-8f6687c44-lcwtx" event={"ID":"515c6c6d-7b89-4ee4-a1eb-d6ed51834050","Type":"ContainerStarted","Data":"dfa04fe0a3fbaf1fab76340125b5cf0e118a34c5cc4d73ad27c4ae3acc9a0847"} Dec 13 03:27:11 crc kubenswrapper[5070]: I1213 03:27:11.381713 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/nova-operator-controller-manager-79d658b66d-9kcnz" podStartSLOduration=7.455847371 
podStartE2EDuration="36.381688819s" podCreationTimestamp="2025-12-13 03:26:35 +0000 UTC" firstStartedPulling="2025-12-13 03:26:37.61009703 +0000 UTC m=+889.845940576" lastFinishedPulling="2025-12-13 03:27:06.535938478 +0000 UTC m=+918.771782024" observedRunningTime="2025-12-13 03:27:11.375558612 +0000 UTC m=+923.611402168" watchObservedRunningTime="2025-12-13 03:27:11.381688819 +0000 UTC m=+923.617532365" Dec 13 03:27:11 crc kubenswrapper[5070]: I1213 03:27:11.391741 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-7d5d9fd47f-lt4rh" event={"ID":"17d2a89b-feed-4eae-bbc3-5296e677ef48","Type":"ContainerStarted","Data":"ed7e15a0faf6f1c8ca5a1737c8577c5ad80287b50d64cf59332d174cc869c6f5"} Dec 13 03:27:11 crc kubenswrapper[5070]: I1213 03:27:11.407136 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-5bfbbb859d-blc2l" event={"ID":"460be9d3-0ac8-4080-ba44-48db8452a323","Type":"ContainerStarted","Data":"ee978184fa6f54fefd90378cf174ab9e6f2aa9ab2228c3762d238c9b7bedb78a"} Dec 13 03:27:11 crc kubenswrapper[5070]: I1213 03:27:11.425256 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-5b67cfc8fb-8fhvk" event={"ID":"a4e6faf7-1a69-45ea-ab85-23acffcd5cf0","Type":"ContainerStarted","Data":"7ff8c0f5c3af643a9b6ec63dcc4fdf79d34f89a7edeb5f9579eb14351c7d7686"} Dec 13 03:27:11 crc kubenswrapper[5070]: I1213 03:27:11.445176 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-controller-manager-64d7c556cd-9xzl9" podStartSLOduration=4.074832521 podStartE2EDuration="36.445153083s" podCreationTimestamp="2025-12-13 03:26:35 +0000 UTC" firstStartedPulling="2025-12-13 03:26:37.660621072 +0000 UTC m=+889.896464618" lastFinishedPulling="2025-12-13 03:27:10.030941624 +0000 UTC m=+922.266785180" observedRunningTime="2025-12-13 03:27:11.440020463 +0000 UTC m=+923.675864019" watchObservedRunningTime="2025-12-13 03:27:11.445153083 +0000 UTC m=+923.680996639" Dec 13 03:27:11 crc kubenswrapper[5070]: I1213 03:27:11.470375 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-85fbd69fcd-98cmt" event={"ID":"17401b15-4810-4d28-9244-f1ef166c3278","Type":"ContainerStarted","Data":"1b881027e1e42dd2e993530a55aefd0d4bf690ca0865a601ae0c0d2a20f85b5c"} Dec 13 03:27:11 crc kubenswrapper[5070]: E1213 03:27:11.484993 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/glance-operator@sha256:f4b6baa2b8a661351cfc24fff5aacee5aa4198106618700cfa47ec3a75f88b31\\\"\"" pod="openstack-operators/glance-operator-controller-manager-85fbd69fcd-98cmt" podUID="17401b15-4810-4d28-9244-f1ef166c3278" Dec 13 03:27:11 crc kubenswrapper[5070]: I1213 03:27:11.494155 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/heat-operator-controller-manager-698d6fd7d6-nm89q" podStartSLOduration=7.58429614 podStartE2EDuration="36.49412907s" podCreationTimestamp="2025-12-13 03:26:35 +0000 UTC" firstStartedPulling="2025-12-13 03:26:37.625900042 +0000 UTC m=+889.861743588" lastFinishedPulling="2025-12-13 03:27:06.535732952 +0000 UTC m=+918.771576518" observedRunningTime="2025-12-13 03:27:11.493303198 +0000 UTC m=+923.729146744" watchObservedRunningTime="2025-12-13 03:27:11.49412907 
+0000 UTC m=+923.729972616" Dec 13 03:27:11 crc kubenswrapper[5070]: I1213 03:27:11.499200 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-867d87977b-v59rf" event={"ID":"10bd6456-bd70-4e1a-a6e0-5eff23c9fec6","Type":"ContainerStarted","Data":"ecd605b04d44263fd15bc7165aff4974b9575bd0fcaa473c7a3e3286e439e103"} Dec 13 03:27:11 crc kubenswrapper[5070]: I1213 03:27:11.499854 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/placement-operator-controller-manager-867d87977b-v59rf" Dec 13 03:27:11 crc kubenswrapper[5070]: I1213 03:27:11.536974 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-6b56b8849f-8svhb" event={"ID":"875771e8-6d22-42d9-89bc-614fdd9e41fa","Type":"ContainerStarted","Data":"4527ed5a6dc828e446e952407228736f201208b3146fac458be93f4a80a0b887"} Dec 13 03:27:11 crc kubenswrapper[5070]: I1213 03:27:11.537812 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/watcher-operator-controller-manager-6b56b8849f-8svhb" Dec 13 03:27:11 crc kubenswrapper[5070]: I1213 03:27:11.555268 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-d5fb87cb8-5mqm7" event={"ID":"954cafe5-7d98-4ccd-ad79-2c928fa65dc1","Type":"ContainerStarted","Data":"7812a9af8c8a9d3fe9c58bb0a5d40156bade06256f13db4b5e7921b468e7a60d"} Dec 13 03:27:11 crc kubenswrapper[5070]: I1213 03:27:11.556005 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/octavia-operator-controller-manager-d5fb87cb8-5mqm7" Dec 13 03:27:11 crc kubenswrapper[5070]: I1213 03:27:11.563559 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-bb86466d8-qhqhn" event={"ID":"69dbad02-02d2-4a9c-befc-bf082990eca7","Type":"ContainerStarted","Data":"f5212befe423d4ad3ae666ad322a1235dbd87a15817de17c4787b739a5ce9931"} Dec 13 03:27:11 crc kubenswrapper[5070]: E1213 03:27:11.565542 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:49180c7bd4f0071e43ae7044260a3a97c4aa34fcbcb2d0d4573df449765ed391\\\"\"" pod="openstack-operators/test-operator-controller-manager-bb86466d8-qhqhn" podUID="69dbad02-02d2-4a9c-befc-bf082990eca7" Dec 13 03:27:11 crc kubenswrapper[5070]: I1213 03:27:11.570107 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-54485f899-6ccdh" event={"ID":"eac9d917-6a87-4f79-9758-15984dd71e23","Type":"ContainerStarted","Data":"b06e9d1c5de005740854afdae06bc72383bddee6503b370f2f46cf720a57b5ef"} Dec 13 03:27:11 crc kubenswrapper[5070]: E1213 03:27:11.575223 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/ironic-operator@sha256:9aee58b2ca71ef9c4f12373090951090d13aa7038d0fef07ec30167f3d6ae23c\\\"\"" pod="openstack-operators/ironic-operator-controller-manager-54485f899-6ccdh" podUID="eac9d917-6a87-4f79-9758-15984dd71e23" Dec 13 03:27:11 crc kubenswrapper[5070]: I1213 03:27:11.588919 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-79cc9d59f5-tnnbf" 
event={"ID":"a037ecf5-dd37-4305-93a6-e28771e8df87","Type":"ContainerStarted","Data":"954ba7ee5f38143b3092ec370c353385475d11037963b6eb227980976822c837"} Dec 13 03:27:11 crc kubenswrapper[5070]: I1213 03:27:11.589683 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-79cc9d59f5-tnnbf" Dec 13 03:27:11 crc kubenswrapper[5070]: I1213 03:27:11.593512 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-77868f484-qs7zf" event={"ID":"bf9b1f68-efa4-4a9e-bcd3-6bcbad0dbd9a","Type":"ContainerStarted","Data":"b9055145ab69e5013d0c4ee760cef8209b450a6e675c21d5af04f7838a90ded6"} Dec 13 03:27:11 crc kubenswrapper[5070]: I1213 03:27:11.594059 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-baremetal-operator-controller-manager-77868f484-qs7zf" Dec 13 03:27:11 crc kubenswrapper[5070]: I1213 03:27:11.595261 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-85d55b5858-xjz7n" event={"ID":"4fadcc6f-e200-444e-b1d4-e195467c129d","Type":"ContainerStarted","Data":"303e46b6c60a9c8c3b8b6f724c39c5c08466a5c4ff21c1339d05f4239e718d47"} Dec 13 03:27:11 crc kubenswrapper[5070]: I1213 03:27:11.595649 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-85d55b5858-xjz7n" Dec 13 03:27:11 crc kubenswrapper[5070]: I1213 03:27:11.596897 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-75f975cf8c-wb8p4" event={"ID":"df0aabbc-0421-41d7-ac6b-540a4bd7121e","Type":"ContainerStarted","Data":"a7a074cfd3766e682d981fbde18703886e28df2ce2969bd3236961bc7179de71"} Dec 13 03:27:11 crc kubenswrapper[5070]: E1213 03:27:11.597979 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.18:5001/openstack-k8s-operators/cinder-operator:c717723667c63a9180d3c76f68061c82fd413162\\\"\"" pod="openstack-operators/cinder-operator-controller-manager-75f975cf8c-wb8p4" podUID="df0aabbc-0421-41d7-ac6b-540a4bd7121e" Dec 13 03:27:11 crc kubenswrapper[5070]: I1213 03:27:11.644761 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-695797c565-nh8lk" event={"ID":"42bdbde2-ee6a-4260-8088-4298757880e1","Type":"ContainerStarted","Data":"bb06ab739960908c12723be27c9eef6b628aac6c22944350770f0dedc7c739e9"} Dec 13 03:27:11 crc kubenswrapper[5070]: I1213 03:27:11.645771 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-695797c565-nh8lk" Dec 13 03:27:11 crc kubenswrapper[5070]: I1213 03:27:11.662325 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-5cbc8c7f96-889ct" event={"ID":"66c61c3a-a7b0-4a4a-b086-9c1531a9b165","Type":"ContainerStarted","Data":"f1f299a6038abfa70dc3787926c15583ca75a46049ec17a7817735979c18e9c4"} Dec 13 03:27:11 crc kubenswrapper[5070]: E1213 03:27:11.663522 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image 
\\\"quay.io/openstack-k8s-operators/manila-operator@sha256:57d9cb0034a7d5c7a39410fcb619ade2010e6855344dc3a0bc2bfd98cdf345d8\\\"\"" pod="openstack-operators/manila-operator-controller-manager-5cbc8c7f96-889ct" podUID="66c61c3a-a7b0-4a4a-b086-9c1531a9b165" Dec 13 03:27:11 crc kubenswrapper[5070]: I1213 03:27:11.779081 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-baremetal-operator-controller-manager-77868f484-qs7zf" podStartSLOduration=8.411466123 podStartE2EDuration="36.779063404s" podCreationTimestamp="2025-12-13 03:26:35 +0000 UTC" firstStartedPulling="2025-12-13 03:26:38.16808369 +0000 UTC m=+890.403927226" lastFinishedPulling="2025-12-13 03:27:06.535680961 +0000 UTC m=+918.771524507" observedRunningTime="2025-12-13 03:27:11.770016406 +0000 UTC m=+924.005859952" watchObservedRunningTime="2025-12-13 03:27:11.779063404 +0000 UTC m=+924.014906950" Dec 13 03:27:11 crc kubenswrapper[5070]: I1213 03:27:11.912643 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-controller-manager-85d55b5858-xjz7n" podStartSLOduration=4.612240539 podStartE2EDuration="36.912615001s" podCreationTimestamp="2025-12-13 03:26:35 +0000 UTC" firstStartedPulling="2025-12-13 03:26:37.655854831 +0000 UTC m=+889.891698377" lastFinishedPulling="2025-12-13 03:27:09.956229293 +0000 UTC m=+922.192072839" observedRunningTime="2025-12-13 03:27:11.906609747 +0000 UTC m=+924.142453313" watchObservedRunningTime="2025-12-13 03:27:11.912615001 +0000 UTC m=+924.148458547" Dec 13 03:27:11 crc kubenswrapper[5070]: I1213 03:27:11.954938 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-controller-manager-79cc9d59f5-tnnbf" podStartSLOduration=4.656180569 podStartE2EDuration="36.954921066s" podCreationTimestamp="2025-12-13 03:26:35 +0000 UTC" firstStartedPulling="2025-12-13 03:26:37.65729232 +0000 UTC m=+889.893135866" lastFinishedPulling="2025-12-13 03:27:09.956032817 +0000 UTC m=+922.191876363" observedRunningTime="2025-12-13 03:27:11.948770738 +0000 UTC m=+924.184614284" watchObservedRunningTime="2025-12-13 03:27:11.954921066 +0000 UTC m=+924.190764612" Dec 13 03:27:12 crc kubenswrapper[5070]: I1213 03:27:12.008608 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/octavia-operator-controller-manager-d5fb87cb8-5mqm7" podStartSLOduration=4.734752318 podStartE2EDuration="37.008583063s" podCreationTimestamp="2025-12-13 03:26:35 +0000 UTC" firstStartedPulling="2025-12-13 03:26:37.691214308 +0000 UTC m=+889.927057854" lastFinishedPulling="2025-12-13 03:27:09.965045053 +0000 UTC m=+922.200888599" observedRunningTime="2025-12-13 03:27:12.003479293 +0000 UTC m=+924.239322859" watchObservedRunningTime="2025-12-13 03:27:12.008583063 +0000 UTC m=+924.244426609" Dec 13 03:27:12 crc kubenswrapper[5070]: I1213 03:27:12.112934 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/placement-operator-controller-manager-867d87977b-v59rf" podStartSLOduration=4.676630569 podStartE2EDuration="37.112918243s" podCreationTimestamp="2025-12-13 03:26:35 +0000 UTC" firstStartedPulling="2025-12-13 03:26:37.655984575 +0000 UTC m=+889.891828121" lastFinishedPulling="2025-12-13 03:27:10.092272249 +0000 UTC m=+922.328115795" observedRunningTime="2025-12-13 03:27:12.10882977 +0000 UTC m=+924.344673326" watchObservedRunningTime="2025-12-13 03:27:12.112918243 +0000 UTC m=+924.348761789" Dec 13 
03:27:12 crc kubenswrapper[5070]: I1213 03:27:12.156131 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/watcher-operator-controller-manager-6b56b8849f-8svhb" podStartSLOduration=4.722188794 podStartE2EDuration="37.156117952s" podCreationTimestamp="2025-12-13 03:26:35 +0000 UTC" firstStartedPulling="2025-12-13 03:26:37.655192844 +0000 UTC m=+889.891036390" lastFinishedPulling="2025-12-13 03:27:10.089121962 +0000 UTC m=+922.324965548" observedRunningTime="2025-12-13 03:27:12.153146621 +0000 UTC m=+924.388990167" watchObservedRunningTime="2025-12-13 03:27:12.156117952 +0000 UTC m=+924.391961498" Dec 13 03:27:12 crc kubenswrapper[5070]: I1213 03:27:12.286853 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/telemetry-operator-controller-manager-695797c565-nh8lk" podStartSLOduration=10.70462215 podStartE2EDuration="37.286837343s" podCreationTimestamp="2025-12-13 03:26:35 +0000 UTC" firstStartedPulling="2025-12-13 03:26:37.58926228 +0000 UTC m=+889.825105826" lastFinishedPulling="2025-12-13 03:27:04.171477463 +0000 UTC m=+916.407321019" observedRunningTime="2025-12-13 03:27:12.212894604 +0000 UTC m=+924.448738150" watchObservedRunningTime="2025-12-13 03:27:12.286837343 +0000 UTC m=+924.522680889" Dec 13 03:27:12 crc kubenswrapper[5070]: I1213 03:27:12.729059 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-5bfbbb859d-blc2l" event={"ID":"460be9d3-0ac8-4080-ba44-48db8452a323","Type":"ContainerStarted","Data":"16983f22b7f5029cf45ea87a813b1a964681562c6bf5cd4ec279af98f19dcac3"} Dec 13 03:27:12 crc kubenswrapper[5070]: I1213 03:27:12.729198 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/barbican-operator-controller-manager-5bfbbb859d-blc2l" Dec 13 03:27:12 crc kubenswrapper[5070]: I1213 03:27:12.731151 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-5b67cfc8fb-8fhvk" event={"ID":"a4e6faf7-1a69-45ea-ab85-23acffcd5cf0","Type":"ContainerStarted","Data":"15fcb83f7d60f0a1f6a2254850c2a1d811632979a3f0e64122102794012a68f4"} Dec 13 03:27:12 crc kubenswrapper[5070]: I1213 03:27:12.731278 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ovn-operator-controller-manager-5b67cfc8fb-8fhvk" Dec 13 03:27:12 crc kubenswrapper[5070]: I1213 03:27:12.732790 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-8f6687c44-lcwtx" event={"ID":"515c6c6d-7b89-4ee4-a1eb-d6ed51834050","Type":"ContainerStarted","Data":"2566eea14c8532872f29ee7e6d5457c81e87e074576f3ce9b84dc49869a6c319"} Dec 13 03:27:12 crc kubenswrapper[5070]: I1213 03:27:12.732847 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/swift-operator-controller-manager-8f6687c44-lcwtx" Dec 13 03:27:12 crc kubenswrapper[5070]: I1213 03:27:12.744658 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-6788cc6d75-8hczh" event={"ID":"5848798f-7e27-4b39-b60d-84edb77c765d","Type":"ContainerStarted","Data":"d19695e2e1cf11a52beb85b67e56308a582fd4ba90ac650112891e9a48589783"} Dec 13 03:27:12 crc kubenswrapper[5070]: I1213 03:27:12.744795 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/designate-operator-controller-manager-6788cc6d75-8hczh" Dec 13 
03:27:12 crc kubenswrapper[5070]: I1213 03:27:12.745986 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-695797c565-nh8lk" event={"ID":"42bdbde2-ee6a-4260-8088-4298757880e1","Type":"ContainerStarted","Data":"4fe3e20f1a9ab304831ec65f135bc3502ca373811f6a9d212523f98818704999"} Dec 13 03:27:12 crc kubenswrapper[5070]: I1213 03:27:12.750631 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-58879495c-mfwwx" event={"ID":"cfd2f621-2f11-4b93-8b02-1ed72c06bb11","Type":"ContainerStarted","Data":"da59667576bbd80cd13db90355ea77ac4cdb11156bfe6a4e28a2e2c5a05ca1b5"} Dec 13 03:27:12 crc kubenswrapper[5070]: I1213 03:27:12.750750 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/neutron-operator-controller-manager-58879495c-mfwwx" Dec 13 03:27:12 crc kubenswrapper[5070]: I1213 03:27:12.752341 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-7d5d9fd47f-lt4rh" event={"ID":"17d2a89b-feed-4eae-bbc3-5296e677ef48","Type":"ContainerStarted","Data":"7074ab673d1d1428c6d74d09731d3a29c68ed685f8ac1a12116be74b04430c1d"} Dec 13 03:27:12 crc kubenswrapper[5070]: I1213 03:27:12.752506 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/horizon-operator-controller-manager-7d5d9fd47f-lt4rh" Dec 13 03:27:12 crc kubenswrapper[5070]: I1213 03:27:12.755041 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-77868f484-qs7zf" event={"ID":"bf9b1f68-efa4-4a9e-bcd3-6bcbad0dbd9a","Type":"ContainerStarted","Data":"3a775848cb96ce3655e095724f3e4f9272a4b4f8aaf408b52db4b9a1481dc261"} Dec 13 03:27:12 crc kubenswrapper[5070]: E1213 03:27:12.758740 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/manila-operator@sha256:57d9cb0034a7d5c7a39410fcb619ade2010e6855344dc3a0bc2bfd98cdf345d8\\\"\"" pod="openstack-operators/manila-operator-controller-manager-5cbc8c7f96-889ct" podUID="66c61c3a-a7b0-4a4a-b086-9c1531a9b165" Dec 13 03:27:12 crc kubenswrapper[5070]: E1213 03:27:12.758780 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/glance-operator@sha256:f4b6baa2b8a661351cfc24fff5aacee5aa4198106618700cfa47ec3a75f88b31\\\"\"" pod="openstack-operators/glance-operator-controller-manager-85fbd69fcd-98cmt" podUID="17401b15-4810-4d28-9244-f1ef166c3278" Dec 13 03:27:12 crc kubenswrapper[5070]: E1213 03:27:12.758797 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:49180c7bd4f0071e43ae7044260a3a97c4aa34fcbcb2d0d4573df449765ed391\\\"\"" pod="openstack-operators/test-operator-controller-manager-bb86466d8-qhqhn" podUID="69dbad02-02d2-4a9c-befc-bf082990eca7" Dec 13 03:27:12 crc kubenswrapper[5070]: I1213 03:27:12.774931 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/designate-operator-controller-manager-6788cc6d75-8hczh" podStartSLOduration=3.789080956 podStartE2EDuration="37.774912505s" podCreationTimestamp="2025-12-13 
03:26:35 +0000 UTC" firstStartedPulling="2025-12-13 03:26:37.065386743 +0000 UTC m=+889.301230289" lastFinishedPulling="2025-12-13 03:27:11.051218282 +0000 UTC m=+923.287061838" observedRunningTime="2025-12-13 03:27:12.771461081 +0000 UTC m=+925.007304637" watchObservedRunningTime="2025-12-13 03:27:12.774912505 +0000 UTC m=+925.010756061" Dec 13 03:27:12 crc kubenswrapper[5070]: I1213 03:27:12.781113 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/barbican-operator-controller-manager-5bfbbb859d-blc2l" podStartSLOduration=2.517542073 podStartE2EDuration="37.781094783s" podCreationTimestamp="2025-12-13 03:26:35 +0000 UTC" firstStartedPulling="2025-12-13 03:26:36.788612783 +0000 UTC m=+889.024456329" lastFinishedPulling="2025-12-13 03:27:12.052165493 +0000 UTC m=+924.288009039" observedRunningTime="2025-12-13 03:27:12.757699155 +0000 UTC m=+924.993542701" watchObservedRunningTime="2025-12-13 03:27:12.781094783 +0000 UTC m=+925.016938329" Dec 13 03:27:12 crc kubenswrapper[5070]: I1213 03:27:12.800232 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/neutron-operator-controller-manager-58879495c-mfwwx" podStartSLOduration=8.891741024 podStartE2EDuration="37.800209936s" podCreationTimestamp="2025-12-13 03:26:35 +0000 UTC" firstStartedPulling="2025-12-13 03:26:37.62727086 +0000 UTC m=+889.863114406" lastFinishedPulling="2025-12-13 03:27:06.535739762 +0000 UTC m=+918.771583318" observedRunningTime="2025-12-13 03:27:12.79046118 +0000 UTC m=+925.026304736" watchObservedRunningTime="2025-12-13 03:27:12.800209936 +0000 UTC m=+925.036053502" Dec 13 03:27:12 crc kubenswrapper[5070]: I1213 03:27:12.820942 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/horizon-operator-controller-manager-7d5d9fd47f-lt4rh" podStartSLOduration=11.297502906 podStartE2EDuration="37.820921112s" podCreationTimestamp="2025-12-13 03:26:35 +0000 UTC" firstStartedPulling="2025-12-13 03:26:37.606714287 +0000 UTC m=+889.842557833" lastFinishedPulling="2025-12-13 03:27:04.130132493 +0000 UTC m=+916.365976039" observedRunningTime="2025-12-13 03:27:12.814577258 +0000 UTC m=+925.050420804" watchObservedRunningTime="2025-12-13 03:27:12.820921112 +0000 UTC m=+925.056764658" Dec 13 03:27:12 crc kubenswrapper[5070]: I1213 03:27:12.877459 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ovn-operator-controller-manager-5b67cfc8fb-8fhvk" podStartSLOduration=3.381224092 podStartE2EDuration="37.877407925s" podCreationTimestamp="2025-12-13 03:26:35 +0000 UTC" firstStartedPulling="2025-12-13 03:26:37.556648578 +0000 UTC m=+889.792492124" lastFinishedPulling="2025-12-13 03:27:12.052832411 +0000 UTC m=+924.288675957" observedRunningTime="2025-12-13 03:27:12.873457096 +0000 UTC m=+925.109300642" watchObservedRunningTime="2025-12-13 03:27:12.877407925 +0000 UTC m=+925.113251471" Dec 13 03:27:12 crc kubenswrapper[5070]: I1213 03:27:12.910830 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/swift-operator-controller-manager-8f6687c44-lcwtx" podStartSLOduration=8.983714624 podStartE2EDuration="37.910812427s" podCreationTimestamp="2025-12-13 03:26:35 +0000 UTC" firstStartedPulling="2025-12-13 03:26:37.609695138 +0000 UTC m=+889.845538684" lastFinishedPulling="2025-12-13 03:27:06.536792941 +0000 UTC m=+918.772636487" observedRunningTime="2025-12-13 03:27:12.906394496 +0000 UTC m=+925.142238042" 
watchObservedRunningTime="2025-12-13 03:27:12.910812427 +0000 UTC m=+925.146655973" Dec 13 03:27:13 crc kubenswrapper[5070]: I1213 03:27:13.762813 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-54485f899-6ccdh" event={"ID":"eac9d917-6a87-4f79-9758-15984dd71e23","Type":"ContainerStarted","Data":"425c82111129ab5b4ef88c79b176534f604049f252dddd50fe3edd532a92fec4"} Dec 13 03:27:13 crc kubenswrapper[5070]: I1213 03:27:13.763211 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ironic-operator-controller-manager-54485f899-6ccdh" Dec 13 03:27:13 crc kubenswrapper[5070]: I1213 03:27:13.764373 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-75f975cf8c-wb8p4" event={"ID":"df0aabbc-0421-41d7-ac6b-540a4bd7121e","Type":"ContainerStarted","Data":"ae6de332e8eeffe95c67ffcb7cc8d87628a564680323c3226dacfbe67edc9b7d"} Dec 13 03:27:13 crc kubenswrapper[5070]: I1213 03:27:13.783879 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ironic-operator-controller-manager-54485f899-6ccdh" podStartSLOduration=2.428598299 podStartE2EDuration="38.783861724s" podCreationTimestamp="2025-12-13 03:26:35 +0000 UTC" firstStartedPulling="2025-12-13 03:26:36.927671797 +0000 UTC m=+889.163515343" lastFinishedPulling="2025-12-13 03:27:13.282935212 +0000 UTC m=+925.518778768" observedRunningTime="2025-12-13 03:27:13.777223173 +0000 UTC m=+926.013066719" watchObservedRunningTime="2025-12-13 03:27:13.783861724 +0000 UTC m=+926.019705270" Dec 13 03:27:13 crc kubenswrapper[5070]: I1213 03:27:13.802208 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/cinder-operator-controller-manager-75f975cf8c-wb8p4" podStartSLOduration=3.029597141 podStartE2EDuration="38.802184325s" podCreationTimestamp="2025-12-13 03:26:35 +0000 UTC" firstStartedPulling="2025-12-13 03:26:37.044529573 +0000 UTC m=+889.280373119" lastFinishedPulling="2025-12-13 03:27:12.817116757 +0000 UTC m=+925.052960303" observedRunningTime="2025-12-13 03:27:13.798817143 +0000 UTC m=+926.034660689" watchObservedRunningTime="2025-12-13 03:27:13.802184325 +0000 UTC m=+926.038027871" Dec 13 03:27:15 crc kubenswrapper[5070]: I1213 03:27:15.923194 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/cinder-operator-controller-manager-75f975cf8c-wb8p4" Dec 13 03:27:15 crc kubenswrapper[5070]: I1213 03:27:15.986318 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-controller-manager-79cc9d59f5-tnnbf" Dec 13 03:27:16 crc kubenswrapper[5070]: I1213 03:27:16.019415 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/heat-operator-controller-manager-698d6fd7d6-nm89q" Dec 13 03:27:16 crc kubenswrapper[5070]: I1213 03:27:16.061602 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/horizon-operator-controller-manager-7d5d9fd47f-lt4rh" Dec 13 03:27:16 crc kubenswrapper[5070]: I1213 03:27:16.061767 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-controller-manager-64d7c556cd-9xzl9" Dec 13 03:27:16 crc kubenswrapper[5070]: I1213 03:27:16.192724 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openstack-operators/neutron-operator-controller-manager-58879495c-mfwwx" Dec 13 03:27:16 crc kubenswrapper[5070]: I1213 03:27:16.218237 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/nova-operator-controller-manager-79d658b66d-9kcnz" Dec 13 03:27:16 crc kubenswrapper[5070]: I1213 03:27:16.270177 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/octavia-operator-controller-manager-d5fb87cb8-5mqm7" Dec 13 03:27:16 crc kubenswrapper[5070]: I1213 03:27:16.338150 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/placement-operator-controller-manager-867d87977b-v59rf" Dec 13 03:27:16 crc kubenswrapper[5070]: I1213 03:27:16.409071 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/swift-operator-controller-manager-8f6687c44-lcwtx" Dec 13 03:27:16 crc kubenswrapper[5070]: I1213 03:27:16.436678 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-695797c565-nh8lk" Dec 13 03:27:16 crc kubenswrapper[5070]: I1213 03:27:16.440995 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-controller-manager-85d55b5858-xjz7n" Dec 13 03:27:16 crc kubenswrapper[5070]: I1213 03:27:16.518007 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/watcher-operator-controller-manager-6b56b8849f-8svhb" Dec 13 03:27:17 crc kubenswrapper[5070]: I1213 03:27:17.827227 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-baremetal-operator-controller-manager-77868f484-qs7zf" Dec 13 03:27:21 crc kubenswrapper[5070]: E1213 03:27:21.168846 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-2c52x" podUID="890ff40a-bb3f-435f-b823-a4e93bf712c0" Dec 13 03:27:23 crc kubenswrapper[5070]: I1213 03:27:23.837216 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-85fbd69fcd-98cmt" event={"ID":"17401b15-4810-4d28-9244-f1ef166c3278","Type":"ContainerStarted","Data":"8a6947e96c67538cd7e95e901caeb13efab1ec24c2a80f9edbd5443cc6f9735f"} Dec 13 03:27:23 crc kubenswrapper[5070]: I1213 03:27:23.837862 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/glance-operator-controller-manager-85fbd69fcd-98cmt" Dec 13 03:27:23 crc kubenswrapper[5070]: I1213 03:27:23.880501 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/glance-operator-controller-manager-85fbd69fcd-98cmt" podStartSLOduration=2.358582572 podStartE2EDuration="48.880473732s" podCreationTimestamp="2025-12-13 03:26:35 +0000 UTC" firstStartedPulling="2025-12-13 03:26:37.071641834 +0000 UTC m=+889.307485380" lastFinishedPulling="2025-12-13 03:27:23.593532964 +0000 UTC m=+935.829376540" observedRunningTime="2025-12-13 03:27:23.862339586 +0000 UTC m=+936.098183162" watchObservedRunningTime="2025-12-13 03:27:23.880473732 +0000 UTC m=+936.116317288" Dec 13 03:27:25 crc kubenswrapper[5070]: I1213 
03:27:25.896971 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/barbican-operator-controller-manager-5bfbbb859d-blc2l" Dec 13 03:27:25 crc kubenswrapper[5070]: I1213 03:27:25.903158 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ironic-operator-controller-manager-54485f899-6ccdh" Dec 13 03:27:25 crc kubenswrapper[5070]: I1213 03:27:25.925901 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/cinder-operator-controller-manager-75f975cf8c-wb8p4" Dec 13 03:27:25 crc kubenswrapper[5070]: I1213 03:27:25.956542 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/designate-operator-controller-manager-6788cc6d75-8hczh" Dec 13 03:27:26 crc kubenswrapper[5070]: I1213 03:27:26.368178 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ovn-operator-controller-manager-5b67cfc8fb-8fhvk" Dec 13 03:27:27 crc kubenswrapper[5070]: I1213 03:27:27.885860 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-5cbc8c7f96-889ct" event={"ID":"66c61c3a-a7b0-4a4a-b086-9c1531a9b165","Type":"ContainerStarted","Data":"78efe34a2960cbaf86137c253aa81f4381e70881b02b7ae0dd2372ece8ade294"} Dec 13 03:27:27 crc kubenswrapper[5070]: I1213 03:27:27.886583 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/manila-operator-controller-manager-5cbc8c7f96-889ct" Dec 13 03:27:27 crc kubenswrapper[5070]: I1213 03:27:27.889954 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-bb86466d8-qhqhn" event={"ID":"69dbad02-02d2-4a9c-befc-bf082990eca7","Type":"ContainerStarted","Data":"1d4b4b392b2364fe154e62f469e854631f9c81223674373fce81528f9f599d62"} Dec 13 03:27:27 crc kubenswrapper[5070]: I1213 03:27:27.890269 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/test-operator-controller-manager-bb86466d8-qhqhn" Dec 13 03:27:27 crc kubenswrapper[5070]: I1213 03:27:27.926967 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/manila-operator-controller-manager-5cbc8c7f96-889ct" podStartSLOduration=4.972011358 podStartE2EDuration="52.92693425s" podCreationTimestamp="2025-12-13 03:26:35 +0000 UTC" firstStartedPulling="2025-12-13 03:26:36.789251972 +0000 UTC m=+889.025095518" lastFinishedPulling="2025-12-13 03:27:24.744174864 +0000 UTC m=+936.980018410" observedRunningTime="2025-12-13 03:27:27.919021384 +0000 UTC m=+940.154864970" watchObservedRunningTime="2025-12-13 03:27:27.92693425 +0000 UTC m=+940.162777836" Dec 13 03:27:27 crc kubenswrapper[5070]: I1213 03:27:27.955857 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/test-operator-controller-manager-bb86466d8-qhqhn" podStartSLOduration=3.279075843 podStartE2EDuration="52.955827929s" podCreationTimestamp="2025-12-13 03:26:35 +0000 UTC" firstStartedPulling="2025-12-13 03:26:37.620303229 +0000 UTC m=+889.856146775" lastFinishedPulling="2025-12-13 03:27:27.297055315 +0000 UTC m=+939.532898861" observedRunningTime="2025-12-13 03:27:27.94557511 +0000 UTC m=+940.181418686" watchObservedRunningTime="2025-12-13 03:27:27.955827929 +0000 UTC m=+940.191671515" Dec 13 03:27:34 crc kubenswrapper[5070]: I1213 03:27:34.139495 5070 kubelet.go:2453] "SyncLoop (PLEG): event 
for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-2c52x" event={"ID":"890ff40a-bb3f-435f-b823-a4e93bf712c0","Type":"ContainerStarted","Data":"ce6bc533db00261fe0de9f7daf3004ad46b3c7c94632170edf80ea60ff8a39d7"} Dec 13 03:27:34 crc kubenswrapper[5070]: I1213 03:27:34.158372 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-2c52x" podStartSLOduration=2.038280388 podStartE2EDuration="58.15834595s" podCreationTimestamp="2025-12-13 03:26:36 +0000 UTC" firstStartedPulling="2025-12-13 03:26:37.691181777 +0000 UTC m=+889.927025333" lastFinishedPulling="2025-12-13 03:27:33.811247349 +0000 UTC m=+946.047090895" observedRunningTime="2025-12-13 03:27:34.157200959 +0000 UTC m=+946.393044545" watchObservedRunningTime="2025-12-13 03:27:34.15834595 +0000 UTC m=+946.394189496" Dec 13 03:27:35 crc kubenswrapper[5070]: I1213 03:27:35.891197 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/manila-operator-controller-manager-5cbc8c7f96-889ct" Dec 13 03:27:35 crc kubenswrapper[5070]: I1213 03:27:35.979519 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/glance-operator-controller-manager-85fbd69fcd-98cmt" Dec 13 03:27:36 crc kubenswrapper[5070]: I1213 03:27:36.472963 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/test-operator-controller-manager-bb86466d8-qhqhn" Dec 13 03:27:51 crc kubenswrapper[5070]: I1213 03:27:51.564433 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-n4tjw"] Dec 13 03:27:51 crc kubenswrapper[5070]: I1213 03:27:51.566209 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-n4tjw" Dec 13 03:27:51 crc kubenswrapper[5070]: I1213 03:27:51.568085 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns" Dec 13 03:27:51 crc kubenswrapper[5070]: I1213 03:27:51.568299 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt" Dec 13 03:27:51 crc kubenswrapper[5070]: I1213 03:27:51.568410 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-t5wjt" Dec 13 03:27:51 crc kubenswrapper[5070]: I1213 03:27:51.571346 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt" Dec 13 03:27:51 crc kubenswrapper[5070]: I1213 03:27:51.585169 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-n4tjw"] Dec 13 03:27:51 crc kubenswrapper[5070]: I1213 03:27:51.632571 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-v9hvp"] Dec 13 03:27:51 crc kubenswrapper[5070]: I1213 03:27:51.634649 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-v9hvp" Dec 13 03:27:51 crc kubenswrapper[5070]: I1213 03:27:51.638966 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc" Dec 13 03:27:51 crc kubenswrapper[5070]: I1213 03:27:51.646841 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-v9hvp"] Dec 13 03:27:51 crc kubenswrapper[5070]: I1213 03:27:51.716635 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/dee9f29c-350c-4461-ba3c-a8ed90b2ce7b-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-v9hvp\" (UID: \"dee9f29c-350c-4461-ba3c-a8ed90b2ce7b\") " pod="openstack/dnsmasq-dns-78dd6ddcc-v9hvp" Dec 13 03:27:51 crc kubenswrapper[5070]: I1213 03:27:51.716693 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lb6md\" (UniqueName: \"kubernetes.io/projected/dee9f29c-350c-4461-ba3c-a8ed90b2ce7b-kube-api-access-lb6md\") pod \"dnsmasq-dns-78dd6ddcc-v9hvp\" (UID: \"dee9f29c-350c-4461-ba3c-a8ed90b2ce7b\") " pod="openstack/dnsmasq-dns-78dd6ddcc-v9hvp" Dec 13 03:27:51 crc kubenswrapper[5070]: I1213 03:27:51.716728 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dee9f29c-350c-4461-ba3c-a8ed90b2ce7b-config\") pod \"dnsmasq-dns-78dd6ddcc-v9hvp\" (UID: \"dee9f29c-350c-4461-ba3c-a8ed90b2ce7b\") " pod="openstack/dnsmasq-dns-78dd6ddcc-v9hvp" Dec 13 03:27:51 crc kubenswrapper[5070]: I1213 03:27:51.716844 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/010ab03f-0c20-4b24-8137-284558e10da5-config\") pod \"dnsmasq-dns-675f4bcbfc-n4tjw\" (UID: \"010ab03f-0c20-4b24-8137-284558e10da5\") " pod="openstack/dnsmasq-dns-675f4bcbfc-n4tjw" Dec 13 03:27:51 crc kubenswrapper[5070]: I1213 03:27:51.716882 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xxqmk\" (UniqueName: \"kubernetes.io/projected/010ab03f-0c20-4b24-8137-284558e10da5-kube-api-access-xxqmk\") pod \"dnsmasq-dns-675f4bcbfc-n4tjw\" (UID: \"010ab03f-0c20-4b24-8137-284558e10da5\") " pod="openstack/dnsmasq-dns-675f4bcbfc-n4tjw" Dec 13 03:27:51 crc kubenswrapper[5070]: I1213 03:27:51.817949 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/010ab03f-0c20-4b24-8137-284558e10da5-config\") pod \"dnsmasq-dns-675f4bcbfc-n4tjw\" (UID: \"010ab03f-0c20-4b24-8137-284558e10da5\") " pod="openstack/dnsmasq-dns-675f4bcbfc-n4tjw" Dec 13 03:27:51 crc kubenswrapper[5070]: I1213 03:27:51.818013 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xxqmk\" (UniqueName: \"kubernetes.io/projected/010ab03f-0c20-4b24-8137-284558e10da5-kube-api-access-xxqmk\") pod \"dnsmasq-dns-675f4bcbfc-n4tjw\" (UID: \"010ab03f-0c20-4b24-8137-284558e10da5\") " pod="openstack/dnsmasq-dns-675f4bcbfc-n4tjw" Dec 13 03:27:51 crc kubenswrapper[5070]: I1213 03:27:51.818073 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/dee9f29c-350c-4461-ba3c-a8ed90b2ce7b-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-v9hvp\" (UID: \"dee9f29c-350c-4461-ba3c-a8ed90b2ce7b\") " pod="openstack/dnsmasq-dns-78dd6ddcc-v9hvp" 
Dec 13 03:27:51 crc kubenswrapper[5070]: I1213 03:27:51.818098 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lb6md\" (UniqueName: \"kubernetes.io/projected/dee9f29c-350c-4461-ba3c-a8ed90b2ce7b-kube-api-access-lb6md\") pod \"dnsmasq-dns-78dd6ddcc-v9hvp\" (UID: \"dee9f29c-350c-4461-ba3c-a8ed90b2ce7b\") " pod="openstack/dnsmasq-dns-78dd6ddcc-v9hvp" Dec 13 03:27:51 crc kubenswrapper[5070]: I1213 03:27:51.818122 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dee9f29c-350c-4461-ba3c-a8ed90b2ce7b-config\") pod \"dnsmasq-dns-78dd6ddcc-v9hvp\" (UID: \"dee9f29c-350c-4461-ba3c-a8ed90b2ce7b\") " pod="openstack/dnsmasq-dns-78dd6ddcc-v9hvp" Dec 13 03:27:51 crc kubenswrapper[5070]: I1213 03:27:51.819065 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/010ab03f-0c20-4b24-8137-284558e10da5-config\") pod \"dnsmasq-dns-675f4bcbfc-n4tjw\" (UID: \"010ab03f-0c20-4b24-8137-284558e10da5\") " pod="openstack/dnsmasq-dns-675f4bcbfc-n4tjw" Dec 13 03:27:51 crc kubenswrapper[5070]: I1213 03:27:51.819114 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dee9f29c-350c-4461-ba3c-a8ed90b2ce7b-config\") pod \"dnsmasq-dns-78dd6ddcc-v9hvp\" (UID: \"dee9f29c-350c-4461-ba3c-a8ed90b2ce7b\") " pod="openstack/dnsmasq-dns-78dd6ddcc-v9hvp" Dec 13 03:27:51 crc kubenswrapper[5070]: I1213 03:27:51.819143 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/dee9f29c-350c-4461-ba3c-a8ed90b2ce7b-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-v9hvp\" (UID: \"dee9f29c-350c-4461-ba3c-a8ed90b2ce7b\") " pod="openstack/dnsmasq-dns-78dd6ddcc-v9hvp" Dec 13 03:27:51 crc kubenswrapper[5070]: I1213 03:27:51.839600 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lb6md\" (UniqueName: \"kubernetes.io/projected/dee9f29c-350c-4461-ba3c-a8ed90b2ce7b-kube-api-access-lb6md\") pod \"dnsmasq-dns-78dd6ddcc-v9hvp\" (UID: \"dee9f29c-350c-4461-ba3c-a8ed90b2ce7b\") " pod="openstack/dnsmasq-dns-78dd6ddcc-v9hvp" Dec 13 03:27:51 crc kubenswrapper[5070]: I1213 03:27:51.840256 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xxqmk\" (UniqueName: \"kubernetes.io/projected/010ab03f-0c20-4b24-8137-284558e10da5-kube-api-access-xxqmk\") pod \"dnsmasq-dns-675f4bcbfc-n4tjw\" (UID: \"010ab03f-0c20-4b24-8137-284558e10da5\") " pod="openstack/dnsmasq-dns-675f4bcbfc-n4tjw" Dec 13 03:27:51 crc kubenswrapper[5070]: I1213 03:27:51.885048 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-n4tjw" Dec 13 03:27:51 crc kubenswrapper[5070]: I1213 03:27:51.964604 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-v9hvp" Dec 13 03:27:52 crc kubenswrapper[5070]: I1213 03:27:52.291156 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-v9hvp"] Dec 13 03:27:52 crc kubenswrapper[5070]: I1213 03:27:52.389825 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-n4tjw"] Dec 13 03:27:52 crc kubenswrapper[5070]: W1213 03:27:52.393737 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod010ab03f_0c20_4b24_8137_284558e10da5.slice/crio-f762bad237f0bf4c36f7100a287be14152cfd8b022644123fae169b218d91d0a WatchSource:0}: Error finding container f762bad237f0bf4c36f7100a287be14152cfd8b022644123fae169b218d91d0a: Status 404 returned error can't find the container with id f762bad237f0bf4c36f7100a287be14152cfd8b022644123fae169b218d91d0a Dec 13 03:27:53 crc kubenswrapper[5070]: I1213 03:27:53.284816 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-v9hvp" event={"ID":"dee9f29c-350c-4461-ba3c-a8ed90b2ce7b","Type":"ContainerStarted","Data":"456da7bed6b0284b4208ded4245a0ba971f093bebe4865e67ab9fa262707472e"} Dec 13 03:27:53 crc kubenswrapper[5070]: I1213 03:27:53.286235 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-n4tjw" event={"ID":"010ab03f-0c20-4b24-8137-284558e10da5","Type":"ContainerStarted","Data":"f762bad237f0bf4c36f7100a287be14152cfd8b022644123fae169b218d91d0a"} Dec 13 03:27:54 crc kubenswrapper[5070]: I1213 03:27:54.657773 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-n4tjw"] Dec 13 03:27:54 crc kubenswrapper[5070]: I1213 03:27:54.685104 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-7z824"] Dec 13 03:27:54 crc kubenswrapper[5070]: I1213 03:27:54.686250 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-7z824" Dec 13 03:27:54 crc kubenswrapper[5070]: I1213 03:27:54.696141 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-7z824"] Dec 13 03:27:54 crc kubenswrapper[5070]: I1213 03:27:54.790486 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-785dj\" (UniqueName: \"kubernetes.io/projected/3e429df3-9f7f-40e4-8d73-d3c492104561-kube-api-access-785dj\") pod \"dnsmasq-dns-666b6646f7-7z824\" (UID: \"3e429df3-9f7f-40e4-8d73-d3c492104561\") " pod="openstack/dnsmasq-dns-666b6646f7-7z824" Dec 13 03:27:54 crc kubenswrapper[5070]: I1213 03:27:54.790603 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3e429df3-9f7f-40e4-8d73-d3c492104561-dns-svc\") pod \"dnsmasq-dns-666b6646f7-7z824\" (UID: \"3e429df3-9f7f-40e4-8d73-d3c492104561\") " pod="openstack/dnsmasq-dns-666b6646f7-7z824" Dec 13 03:27:54 crc kubenswrapper[5070]: I1213 03:27:54.790648 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3e429df3-9f7f-40e4-8d73-d3c492104561-config\") pod \"dnsmasq-dns-666b6646f7-7z824\" (UID: \"3e429df3-9f7f-40e4-8d73-d3c492104561\") " pod="openstack/dnsmasq-dns-666b6646f7-7z824" Dec 13 03:27:54 crc kubenswrapper[5070]: I1213 03:27:54.893694 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3e429df3-9f7f-40e4-8d73-d3c492104561-dns-svc\") pod \"dnsmasq-dns-666b6646f7-7z824\" (UID: \"3e429df3-9f7f-40e4-8d73-d3c492104561\") " pod="openstack/dnsmasq-dns-666b6646f7-7z824" Dec 13 03:27:54 crc kubenswrapper[5070]: I1213 03:27:54.893772 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3e429df3-9f7f-40e4-8d73-d3c492104561-config\") pod \"dnsmasq-dns-666b6646f7-7z824\" (UID: \"3e429df3-9f7f-40e4-8d73-d3c492104561\") " pod="openstack/dnsmasq-dns-666b6646f7-7z824" Dec 13 03:27:54 crc kubenswrapper[5070]: I1213 03:27:54.893829 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-785dj\" (UniqueName: \"kubernetes.io/projected/3e429df3-9f7f-40e4-8d73-d3c492104561-kube-api-access-785dj\") pod \"dnsmasq-dns-666b6646f7-7z824\" (UID: \"3e429df3-9f7f-40e4-8d73-d3c492104561\") " pod="openstack/dnsmasq-dns-666b6646f7-7z824" Dec 13 03:27:54 crc kubenswrapper[5070]: I1213 03:27:54.895300 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3e429df3-9f7f-40e4-8d73-d3c492104561-dns-svc\") pod \"dnsmasq-dns-666b6646f7-7z824\" (UID: \"3e429df3-9f7f-40e4-8d73-d3c492104561\") " pod="openstack/dnsmasq-dns-666b6646f7-7z824" Dec 13 03:27:54 crc kubenswrapper[5070]: I1213 03:27:54.895426 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3e429df3-9f7f-40e4-8d73-d3c492104561-config\") pod \"dnsmasq-dns-666b6646f7-7z824\" (UID: \"3e429df3-9f7f-40e4-8d73-d3c492104561\") " pod="openstack/dnsmasq-dns-666b6646f7-7z824" Dec 13 03:27:54 crc kubenswrapper[5070]: I1213 03:27:54.930579 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-785dj\" (UniqueName: 
\"kubernetes.io/projected/3e429df3-9f7f-40e4-8d73-d3c492104561-kube-api-access-785dj\") pod \"dnsmasq-dns-666b6646f7-7z824\" (UID: \"3e429df3-9f7f-40e4-8d73-d3c492104561\") " pod="openstack/dnsmasq-dns-666b6646f7-7z824" Dec 13 03:27:54 crc kubenswrapper[5070]: I1213 03:27:54.981270 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-v9hvp"] Dec 13 03:27:55 crc kubenswrapper[5070]: I1213 03:27:55.000420 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-9w6wn"] Dec 13 03:27:55 crc kubenswrapper[5070]: I1213 03:27:55.001580 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-9w6wn" Dec 13 03:27:55 crc kubenswrapper[5070]: I1213 03:27:55.013572 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-7z824" Dec 13 03:27:55 crc kubenswrapper[5070]: I1213 03:27:55.026053 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-9w6wn"] Dec 13 03:27:55 crc kubenswrapper[5070]: I1213 03:27:55.202382 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vhwrf\" (UniqueName: \"kubernetes.io/projected/847376ab-5cb3-46ed-8b13-f0a21f09c135-kube-api-access-vhwrf\") pod \"dnsmasq-dns-57d769cc4f-9w6wn\" (UID: \"847376ab-5cb3-46ed-8b13-f0a21f09c135\") " pod="openstack/dnsmasq-dns-57d769cc4f-9w6wn" Dec 13 03:27:55 crc kubenswrapper[5070]: I1213 03:27:55.202464 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/847376ab-5cb3-46ed-8b13-f0a21f09c135-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-9w6wn\" (UID: \"847376ab-5cb3-46ed-8b13-f0a21f09c135\") " pod="openstack/dnsmasq-dns-57d769cc4f-9w6wn" Dec 13 03:27:55 crc kubenswrapper[5070]: I1213 03:27:55.202509 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/847376ab-5cb3-46ed-8b13-f0a21f09c135-config\") pod \"dnsmasq-dns-57d769cc4f-9w6wn\" (UID: \"847376ab-5cb3-46ed-8b13-f0a21f09c135\") " pod="openstack/dnsmasq-dns-57d769cc4f-9w6wn" Dec 13 03:27:55 crc kubenswrapper[5070]: I1213 03:27:55.309322 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vhwrf\" (UniqueName: \"kubernetes.io/projected/847376ab-5cb3-46ed-8b13-f0a21f09c135-kube-api-access-vhwrf\") pod \"dnsmasq-dns-57d769cc4f-9w6wn\" (UID: \"847376ab-5cb3-46ed-8b13-f0a21f09c135\") " pod="openstack/dnsmasq-dns-57d769cc4f-9w6wn" Dec 13 03:27:55 crc kubenswrapper[5070]: I1213 03:27:55.309402 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/847376ab-5cb3-46ed-8b13-f0a21f09c135-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-9w6wn\" (UID: \"847376ab-5cb3-46ed-8b13-f0a21f09c135\") " pod="openstack/dnsmasq-dns-57d769cc4f-9w6wn" Dec 13 03:27:55 crc kubenswrapper[5070]: I1213 03:27:55.309499 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/847376ab-5cb3-46ed-8b13-f0a21f09c135-config\") pod \"dnsmasq-dns-57d769cc4f-9w6wn\" (UID: \"847376ab-5cb3-46ed-8b13-f0a21f09c135\") " pod="openstack/dnsmasq-dns-57d769cc4f-9w6wn" Dec 13 03:27:55 crc kubenswrapper[5070]: I1213 03:27:55.310334 5070 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/847376ab-5cb3-46ed-8b13-f0a21f09c135-config\") pod \"dnsmasq-dns-57d769cc4f-9w6wn\" (UID: \"847376ab-5cb3-46ed-8b13-f0a21f09c135\") " pod="openstack/dnsmasq-dns-57d769cc4f-9w6wn" Dec 13 03:27:55 crc kubenswrapper[5070]: I1213 03:27:55.313037 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/847376ab-5cb3-46ed-8b13-f0a21f09c135-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-9w6wn\" (UID: \"847376ab-5cb3-46ed-8b13-f0a21f09c135\") " pod="openstack/dnsmasq-dns-57d769cc4f-9w6wn" Dec 13 03:27:55 crc kubenswrapper[5070]: I1213 03:27:55.348873 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vhwrf\" (UniqueName: \"kubernetes.io/projected/847376ab-5cb3-46ed-8b13-f0a21f09c135-kube-api-access-vhwrf\") pod \"dnsmasq-dns-57d769cc4f-9w6wn\" (UID: \"847376ab-5cb3-46ed-8b13-f0a21f09c135\") " pod="openstack/dnsmasq-dns-57d769cc4f-9w6wn" Dec 13 03:27:55 crc kubenswrapper[5070]: I1213 03:27:55.635582 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-9w6wn" Dec 13 03:27:55 crc kubenswrapper[5070]: I1213 03:27:55.647248 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-7z824"] Dec 13 03:27:55 crc kubenswrapper[5070]: W1213 03:27:55.665287 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3e429df3_9f7f_40e4_8d73_d3c492104561.slice/crio-c4350282aa912190059cf07358c2309b3112f2566a616c4b853446881f3055cb WatchSource:0}: Error finding container c4350282aa912190059cf07358c2309b3112f2566a616c4b853446881f3055cb: Status 404 returned error can't find the container with id c4350282aa912190059cf07358c2309b3112f2566a616c4b853446881f3055cb Dec 13 03:27:55 crc kubenswrapper[5070]: I1213 03:27:55.844106 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Dec 13 03:27:55 crc kubenswrapper[5070]: I1213 03:27:55.846007 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 13 03:27:55 crc kubenswrapper[5070]: I1213 03:27:55.851970 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Dec 13 03:27:55 crc kubenswrapper[5070]: I1213 03:27:55.852195 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-hcv2k" Dec 13 03:27:55 crc kubenswrapper[5070]: I1213 03:27:55.852358 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Dec 13 03:27:55 crc kubenswrapper[5070]: I1213 03:27:55.852562 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Dec 13 03:27:55 crc kubenswrapper[5070]: I1213 03:27:55.852710 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Dec 13 03:27:55 crc kubenswrapper[5070]: I1213 03:27:55.853249 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Dec 13 03:27:55 crc kubenswrapper[5070]: I1213 03:27:55.853455 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Dec 13 03:27:55 crc kubenswrapper[5070]: I1213 03:27:55.865172 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 13 03:27:55 crc kubenswrapper[5070]: I1213 03:27:55.939578 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/9dd13bd9-bfbd-4f80-b334-d8b959a6187d-server-conf\") pod \"rabbitmq-server-0\" (UID: \"9dd13bd9-bfbd-4f80-b334-d8b959a6187d\") " pod="openstack/rabbitmq-server-0" Dec 13 03:27:55 crc kubenswrapper[5070]: I1213 03:27:55.939728 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/9dd13bd9-bfbd-4f80-b334-d8b959a6187d-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"9dd13bd9-bfbd-4f80-b334-d8b959a6187d\") " pod="openstack/rabbitmq-server-0" Dec 13 03:27:55 crc kubenswrapper[5070]: I1213 03:27:55.939754 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/9dd13bd9-bfbd-4f80-b334-d8b959a6187d-pod-info\") pod \"rabbitmq-server-0\" (UID: \"9dd13bd9-bfbd-4f80-b334-d8b959a6187d\") " pod="openstack/rabbitmq-server-0" Dec 13 03:27:55 crc kubenswrapper[5070]: I1213 03:27:55.939823 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/9dd13bd9-bfbd-4f80-b334-d8b959a6187d-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"9dd13bd9-bfbd-4f80-b334-d8b959a6187d\") " pod="openstack/rabbitmq-server-0" Dec 13 03:27:55 crc kubenswrapper[5070]: I1213 03:27:55.939921 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/9dd13bd9-bfbd-4f80-b334-d8b959a6187d-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"9dd13bd9-bfbd-4f80-b334-d8b959a6187d\") " pod="openstack/rabbitmq-server-0" Dec 13 03:27:55 crc kubenswrapper[5070]: I1213 03:27:55.939955 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wlsq6\" (UniqueName: 
\"kubernetes.io/projected/9dd13bd9-bfbd-4f80-b334-d8b959a6187d-kube-api-access-wlsq6\") pod \"rabbitmq-server-0\" (UID: \"9dd13bd9-bfbd-4f80-b334-d8b959a6187d\") " pod="openstack/rabbitmq-server-0" Dec 13 03:27:55 crc kubenswrapper[5070]: I1213 03:27:55.940004 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/9dd13bd9-bfbd-4f80-b334-d8b959a6187d-config-data\") pod \"rabbitmq-server-0\" (UID: \"9dd13bd9-bfbd-4f80-b334-d8b959a6187d\") " pod="openstack/rabbitmq-server-0" Dec 13 03:27:55 crc kubenswrapper[5070]: I1213 03:27:55.940027 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/9dd13bd9-bfbd-4f80-b334-d8b959a6187d-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"9dd13bd9-bfbd-4f80-b334-d8b959a6187d\") " pod="openstack/rabbitmq-server-0" Dec 13 03:27:55 crc kubenswrapper[5070]: I1213 03:27:55.940042 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/9dd13bd9-bfbd-4f80-b334-d8b959a6187d-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"9dd13bd9-bfbd-4f80-b334-d8b959a6187d\") " pod="openstack/rabbitmq-server-0" Dec 13 03:27:55 crc kubenswrapper[5070]: I1213 03:27:55.940059 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/9dd13bd9-bfbd-4f80-b334-d8b959a6187d-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"9dd13bd9-bfbd-4f80-b334-d8b959a6187d\") " pod="openstack/rabbitmq-server-0" Dec 13 03:27:55 crc kubenswrapper[5070]: I1213 03:27:55.940088 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"rabbitmq-server-0\" (UID: \"9dd13bd9-bfbd-4f80-b334-d8b959a6187d\") " pod="openstack/rabbitmq-server-0" Dec 13 03:27:56 crc kubenswrapper[5070]: I1213 03:27:56.079883 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/9dd13bd9-bfbd-4f80-b334-d8b959a6187d-server-conf\") pod \"rabbitmq-server-0\" (UID: \"9dd13bd9-bfbd-4f80-b334-d8b959a6187d\") " pod="openstack/rabbitmq-server-0" Dec 13 03:27:56 crc kubenswrapper[5070]: I1213 03:27:56.079934 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/9dd13bd9-bfbd-4f80-b334-d8b959a6187d-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"9dd13bd9-bfbd-4f80-b334-d8b959a6187d\") " pod="openstack/rabbitmq-server-0" Dec 13 03:27:56 crc kubenswrapper[5070]: I1213 03:27:56.079962 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/9dd13bd9-bfbd-4f80-b334-d8b959a6187d-pod-info\") pod \"rabbitmq-server-0\" (UID: \"9dd13bd9-bfbd-4f80-b334-d8b959a6187d\") " pod="openstack/rabbitmq-server-0" Dec 13 03:27:56 crc kubenswrapper[5070]: I1213 03:27:56.079991 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/9dd13bd9-bfbd-4f80-b334-d8b959a6187d-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"9dd13bd9-bfbd-4f80-b334-d8b959a6187d\") " 
pod="openstack/rabbitmq-server-0" Dec 13 03:27:56 crc kubenswrapper[5070]: I1213 03:27:56.080023 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/9dd13bd9-bfbd-4f80-b334-d8b959a6187d-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"9dd13bd9-bfbd-4f80-b334-d8b959a6187d\") " pod="openstack/rabbitmq-server-0" Dec 13 03:27:56 crc kubenswrapper[5070]: I1213 03:27:56.080052 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wlsq6\" (UniqueName: \"kubernetes.io/projected/9dd13bd9-bfbd-4f80-b334-d8b959a6187d-kube-api-access-wlsq6\") pod \"rabbitmq-server-0\" (UID: \"9dd13bd9-bfbd-4f80-b334-d8b959a6187d\") " pod="openstack/rabbitmq-server-0" Dec 13 03:27:56 crc kubenswrapper[5070]: I1213 03:27:56.080089 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/9dd13bd9-bfbd-4f80-b334-d8b959a6187d-config-data\") pod \"rabbitmq-server-0\" (UID: \"9dd13bd9-bfbd-4f80-b334-d8b959a6187d\") " pod="openstack/rabbitmq-server-0" Dec 13 03:27:56 crc kubenswrapper[5070]: I1213 03:27:56.080123 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/9dd13bd9-bfbd-4f80-b334-d8b959a6187d-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"9dd13bd9-bfbd-4f80-b334-d8b959a6187d\") " pod="openstack/rabbitmq-server-0" Dec 13 03:27:56 crc kubenswrapper[5070]: I1213 03:27:56.080139 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/9dd13bd9-bfbd-4f80-b334-d8b959a6187d-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"9dd13bd9-bfbd-4f80-b334-d8b959a6187d\") " pod="openstack/rabbitmq-server-0" Dec 13 03:27:56 crc kubenswrapper[5070]: I1213 03:27:56.080157 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/9dd13bd9-bfbd-4f80-b334-d8b959a6187d-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"9dd13bd9-bfbd-4f80-b334-d8b959a6187d\") " pod="openstack/rabbitmq-server-0" Dec 13 03:27:56 crc kubenswrapper[5070]: I1213 03:27:56.080241 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"rabbitmq-server-0\" (UID: \"9dd13bd9-bfbd-4f80-b334-d8b959a6187d\") " pod="openstack/rabbitmq-server-0" Dec 13 03:27:56 crc kubenswrapper[5070]: I1213 03:27:56.080638 5070 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"rabbitmq-server-0\" (UID: \"9dd13bd9-bfbd-4f80-b334-d8b959a6187d\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/rabbitmq-server-0" Dec 13 03:27:56 crc kubenswrapper[5070]: I1213 03:27:56.081201 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/9dd13bd9-bfbd-4f80-b334-d8b959a6187d-server-conf\") pod \"rabbitmq-server-0\" (UID: \"9dd13bd9-bfbd-4f80-b334-d8b959a6187d\") " pod="openstack/rabbitmq-server-0" Dec 13 03:27:56 crc kubenswrapper[5070]: I1213 03:27:56.081539 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: 
\"kubernetes.io/empty-dir/9dd13bd9-bfbd-4f80-b334-d8b959a6187d-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"9dd13bd9-bfbd-4f80-b334-d8b959a6187d\") " pod="openstack/rabbitmq-server-0" Dec 13 03:27:56 crc kubenswrapper[5070]: I1213 03:27:56.089206 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/9dd13bd9-bfbd-4f80-b334-d8b959a6187d-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"9dd13bd9-bfbd-4f80-b334-d8b959a6187d\") " pod="openstack/rabbitmq-server-0" Dec 13 03:27:56 crc kubenswrapper[5070]: I1213 03:27:56.089862 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/9dd13bd9-bfbd-4f80-b334-d8b959a6187d-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"9dd13bd9-bfbd-4f80-b334-d8b959a6187d\") " pod="openstack/rabbitmq-server-0" Dec 13 03:27:56 crc kubenswrapper[5070]: I1213 03:27:56.090815 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/9dd13bd9-bfbd-4f80-b334-d8b959a6187d-config-data\") pod \"rabbitmq-server-0\" (UID: \"9dd13bd9-bfbd-4f80-b334-d8b959a6187d\") " pod="openstack/rabbitmq-server-0" Dec 13 03:27:56 crc kubenswrapper[5070]: I1213 03:27:56.090987 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/9dd13bd9-bfbd-4f80-b334-d8b959a6187d-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"9dd13bd9-bfbd-4f80-b334-d8b959a6187d\") " pod="openstack/rabbitmq-server-0" Dec 13 03:27:56 crc kubenswrapper[5070]: I1213 03:27:56.097299 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/9dd13bd9-bfbd-4f80-b334-d8b959a6187d-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"9dd13bd9-bfbd-4f80-b334-d8b959a6187d\") " pod="openstack/rabbitmq-server-0" Dec 13 03:27:56 crc kubenswrapper[5070]: I1213 03:27:56.098827 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/9dd13bd9-bfbd-4f80-b334-d8b959a6187d-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"9dd13bd9-bfbd-4f80-b334-d8b959a6187d\") " pod="openstack/rabbitmq-server-0" Dec 13 03:27:56 crc kubenswrapper[5070]: I1213 03:27:56.099966 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/9dd13bd9-bfbd-4f80-b334-d8b959a6187d-pod-info\") pod \"rabbitmq-server-0\" (UID: \"9dd13bd9-bfbd-4f80-b334-d8b959a6187d\") " pod="openstack/rabbitmq-server-0" Dec 13 03:27:56 crc kubenswrapper[5070]: I1213 03:27:56.101671 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wlsq6\" (UniqueName: \"kubernetes.io/projected/9dd13bd9-bfbd-4f80-b334-d8b959a6187d-kube-api-access-wlsq6\") pod \"rabbitmq-server-0\" (UID: \"9dd13bd9-bfbd-4f80-b334-d8b959a6187d\") " pod="openstack/rabbitmq-server-0" Dec 13 03:27:56 crc kubenswrapper[5070]: I1213 03:27:56.110535 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"rabbitmq-server-0\" (UID: \"9dd13bd9-bfbd-4f80-b334-d8b959a6187d\") " pod="openstack/rabbitmq-server-0" Dec 13 03:27:56 crc kubenswrapper[5070]: I1213 03:27:56.117086 5070 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack/rabbitmq-cell1-server-0"] Dec 13 03:27:56 crc kubenswrapper[5070]: I1213 03:27:56.118433 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 13 03:27:56 crc kubenswrapper[5070]: I1213 03:27:56.121984 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Dec 13 03:27:56 crc kubenswrapper[5070]: I1213 03:27:56.122804 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Dec 13 03:27:56 crc kubenswrapper[5070]: I1213 03:27:56.125548 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Dec 13 03:27:56 crc kubenswrapper[5070]: I1213 03:27:56.125762 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Dec 13 03:27:56 crc kubenswrapper[5070]: I1213 03:27:56.125890 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Dec 13 03:27:56 crc kubenswrapper[5070]: I1213 03:27:56.125930 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Dec 13 03:27:56 crc kubenswrapper[5070]: I1213 03:27:56.126409 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-qjxjw" Dec 13 03:27:56 crc kubenswrapper[5070]: I1213 03:27:56.138909 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 13 03:27:56 crc kubenswrapper[5070]: I1213 03:27:56.185009 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 13 03:27:56 crc kubenswrapper[5070]: I1213 03:27:56.231220 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-9w6wn"] Dec 13 03:27:56 crc kubenswrapper[5070]: W1213 03:27:56.232509 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod847376ab_5cb3_46ed_8b13_f0a21f09c135.slice/crio-bbdf6ce710ce34b56a2f47646177c2340fb8ed08e7e0ab3c2d3646e35f0f3834 WatchSource:0}: Error finding container bbdf6ce710ce34b56a2f47646177c2340fb8ed08e7e0ab3c2d3646e35f0f3834: Status 404 returned error can't find the container with id bbdf6ce710ce34b56a2f47646177c2340fb8ed08e7e0ab3c2d3646e35f0f3834 Dec 13 03:27:56 crc kubenswrapper[5070]: I1213 03:27:56.288506 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d669856e-7406-451a-825e-9de1fc76f8b2-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"d669856e-7406-451a-825e-9de1fc76f8b2\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 03:27:56 crc kubenswrapper[5070]: I1213 03:27:56.288602 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/d669856e-7406-451a-825e-9de1fc76f8b2-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"d669856e-7406-451a-825e-9de1fc76f8b2\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 03:27:56 crc kubenswrapper[5070]: I1213 03:27:56.288670 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/d669856e-7406-451a-825e-9de1fc76f8b2-plugins-conf\") pod 
\"rabbitmq-cell1-server-0\" (UID: \"d669856e-7406-451a-825e-9de1fc76f8b2\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 03:27:56 crc kubenswrapper[5070]: I1213 03:27:56.288727 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/d669856e-7406-451a-825e-9de1fc76f8b2-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"d669856e-7406-451a-825e-9de1fc76f8b2\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 03:27:56 crc kubenswrapper[5070]: I1213 03:27:56.288805 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mgtbx\" (UniqueName: \"kubernetes.io/projected/d669856e-7406-451a-825e-9de1fc76f8b2-kube-api-access-mgtbx\") pod \"rabbitmq-cell1-server-0\" (UID: \"d669856e-7406-451a-825e-9de1fc76f8b2\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 03:27:56 crc kubenswrapper[5070]: I1213 03:27:56.288831 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/d669856e-7406-451a-825e-9de1fc76f8b2-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"d669856e-7406-451a-825e-9de1fc76f8b2\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 03:27:56 crc kubenswrapper[5070]: I1213 03:27:56.288894 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/d669856e-7406-451a-825e-9de1fc76f8b2-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"d669856e-7406-451a-825e-9de1fc76f8b2\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 03:27:56 crc kubenswrapper[5070]: I1213 03:27:56.289060 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/d669856e-7406-451a-825e-9de1fc76f8b2-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"d669856e-7406-451a-825e-9de1fc76f8b2\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 03:27:56 crc kubenswrapper[5070]: I1213 03:27:56.289089 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/d669856e-7406-451a-825e-9de1fc76f8b2-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"d669856e-7406-451a-825e-9de1fc76f8b2\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 03:27:56 crc kubenswrapper[5070]: I1213 03:27:56.290421 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"d669856e-7406-451a-825e-9de1fc76f8b2\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 03:27:56 crc kubenswrapper[5070]: I1213 03:27:56.290476 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/d669856e-7406-451a-825e-9de1fc76f8b2-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"d669856e-7406-451a-825e-9de1fc76f8b2\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 03:27:56 crc kubenswrapper[5070]: I1213 03:27:56.358724 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-9w6wn" 
event={"ID":"847376ab-5cb3-46ed-8b13-f0a21f09c135","Type":"ContainerStarted","Data":"bbdf6ce710ce34b56a2f47646177c2340fb8ed08e7e0ab3c2d3646e35f0f3834"} Dec 13 03:27:56 crc kubenswrapper[5070]: I1213 03:27:56.372508 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-7z824" event={"ID":"3e429df3-9f7f-40e4-8d73-d3c492104561","Type":"ContainerStarted","Data":"c4350282aa912190059cf07358c2309b3112f2566a616c4b853446881f3055cb"} Dec 13 03:27:56 crc kubenswrapper[5070]: I1213 03:27:56.392718 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d669856e-7406-451a-825e-9de1fc76f8b2-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"d669856e-7406-451a-825e-9de1fc76f8b2\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 03:27:56 crc kubenswrapper[5070]: I1213 03:27:56.392815 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/d669856e-7406-451a-825e-9de1fc76f8b2-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"d669856e-7406-451a-825e-9de1fc76f8b2\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 03:27:56 crc kubenswrapper[5070]: I1213 03:27:56.392857 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/d669856e-7406-451a-825e-9de1fc76f8b2-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"d669856e-7406-451a-825e-9de1fc76f8b2\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 03:27:56 crc kubenswrapper[5070]: I1213 03:27:56.392902 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/d669856e-7406-451a-825e-9de1fc76f8b2-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"d669856e-7406-451a-825e-9de1fc76f8b2\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 03:27:56 crc kubenswrapper[5070]: I1213 03:27:56.392956 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mgtbx\" (UniqueName: \"kubernetes.io/projected/d669856e-7406-451a-825e-9de1fc76f8b2-kube-api-access-mgtbx\") pod \"rabbitmq-cell1-server-0\" (UID: \"d669856e-7406-451a-825e-9de1fc76f8b2\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 03:27:56 crc kubenswrapper[5070]: I1213 03:27:56.392975 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/d669856e-7406-451a-825e-9de1fc76f8b2-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"d669856e-7406-451a-825e-9de1fc76f8b2\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 03:27:56 crc kubenswrapper[5070]: I1213 03:27:56.393110 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/d669856e-7406-451a-825e-9de1fc76f8b2-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"d669856e-7406-451a-825e-9de1fc76f8b2\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 03:27:56 crc kubenswrapper[5070]: I1213 03:27:56.393137 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/d669856e-7406-451a-825e-9de1fc76f8b2-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"d669856e-7406-451a-825e-9de1fc76f8b2\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 03:27:56 crc 
kubenswrapper[5070]: I1213 03:27:56.393163 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/d669856e-7406-451a-825e-9de1fc76f8b2-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"d669856e-7406-451a-825e-9de1fc76f8b2\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 03:27:56 crc kubenswrapper[5070]: I1213 03:27:56.393191 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"d669856e-7406-451a-825e-9de1fc76f8b2\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 03:27:56 crc kubenswrapper[5070]: I1213 03:27:56.393240 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/d669856e-7406-451a-825e-9de1fc76f8b2-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"d669856e-7406-451a-825e-9de1fc76f8b2\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 03:27:56 crc kubenswrapper[5070]: I1213 03:27:56.393752 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d669856e-7406-451a-825e-9de1fc76f8b2-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"d669856e-7406-451a-825e-9de1fc76f8b2\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 03:27:56 crc kubenswrapper[5070]: I1213 03:27:56.395088 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/d669856e-7406-451a-825e-9de1fc76f8b2-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"d669856e-7406-451a-825e-9de1fc76f8b2\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 03:27:56 crc kubenswrapper[5070]: I1213 03:27:56.395347 5070 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"d669856e-7406-451a-825e-9de1fc76f8b2\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/rabbitmq-cell1-server-0" Dec 13 03:27:56 crc kubenswrapper[5070]: I1213 03:27:56.395538 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/d669856e-7406-451a-825e-9de1fc76f8b2-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"d669856e-7406-451a-825e-9de1fc76f8b2\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 03:27:56 crc kubenswrapper[5070]: I1213 03:27:56.396206 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/d669856e-7406-451a-825e-9de1fc76f8b2-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"d669856e-7406-451a-825e-9de1fc76f8b2\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 03:27:56 crc kubenswrapper[5070]: I1213 03:27:56.396605 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/d669856e-7406-451a-825e-9de1fc76f8b2-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"d669856e-7406-451a-825e-9de1fc76f8b2\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 03:27:56 crc kubenswrapper[5070]: I1213 03:27:56.400249 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: 
\"kubernetes.io/projected/d669856e-7406-451a-825e-9de1fc76f8b2-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"d669856e-7406-451a-825e-9de1fc76f8b2\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 03:27:56 crc kubenswrapper[5070]: I1213 03:27:56.401713 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/d669856e-7406-451a-825e-9de1fc76f8b2-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"d669856e-7406-451a-825e-9de1fc76f8b2\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 03:27:56 crc kubenswrapper[5070]: I1213 03:27:56.402978 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/d669856e-7406-451a-825e-9de1fc76f8b2-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"d669856e-7406-451a-825e-9de1fc76f8b2\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 03:27:56 crc kubenswrapper[5070]: I1213 03:27:56.403725 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/d669856e-7406-451a-825e-9de1fc76f8b2-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"d669856e-7406-451a-825e-9de1fc76f8b2\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 03:27:56 crc kubenswrapper[5070]: I1213 03:27:56.421264 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mgtbx\" (UniqueName: \"kubernetes.io/projected/d669856e-7406-451a-825e-9de1fc76f8b2-kube-api-access-mgtbx\") pod \"rabbitmq-cell1-server-0\" (UID: \"d669856e-7406-451a-825e-9de1fc76f8b2\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 03:27:56 crc kubenswrapper[5070]: I1213 03:27:56.432459 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"d669856e-7406-451a-825e-9de1fc76f8b2\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 03:27:56 crc kubenswrapper[5070]: I1213 03:27:56.459829 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 13 03:27:56 crc kubenswrapper[5070]: I1213 03:27:56.822096 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 13 03:27:57 crc kubenswrapper[5070]: I1213 03:27:57.019849 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 13 03:27:57 crc kubenswrapper[5070]: W1213 03:27:57.028738 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd669856e_7406_451a_825e_9de1fc76f8b2.slice/crio-f7173909be8baba2717545f4dee5f99e2a66454c88fefd7520a3aa462df2a51d WatchSource:0}: Error finding container f7173909be8baba2717545f4dee5f99e2a66454c88fefd7520a3aa462df2a51d: Status 404 returned error can't find the container with id f7173909be8baba2717545f4dee5f99e2a66454c88fefd7520a3aa462df2a51d Dec 13 03:27:57 crc kubenswrapper[5070]: I1213 03:27:57.380399 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"9dd13bd9-bfbd-4f80-b334-d8b959a6187d","Type":"ContainerStarted","Data":"ef6ffc167256dc2ba02949de0fea32f46eb3d6e61e4d9d993516512c1fb024a3"} Dec 13 03:27:57 crc kubenswrapper[5070]: I1213 03:27:57.381748 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"d669856e-7406-451a-825e-9de1fc76f8b2","Type":"ContainerStarted","Data":"f7173909be8baba2717545f4dee5f99e2a66454c88fefd7520a3aa462df2a51d"} Dec 13 03:27:57 crc kubenswrapper[5070]: I1213 03:27:57.617654 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"] Dec 13 03:27:57 crc kubenswrapper[5070]: I1213 03:27:57.619191 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-galera-0" Dec 13 03:27:57 crc kubenswrapper[5070]: I1213 03:27:57.622049 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Dec 13 03:27:57 crc kubenswrapper[5070]: I1213 03:27:57.627569 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data" Dec 13 03:27:57 crc kubenswrapper[5070]: I1213 03:27:57.627674 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts" Dec 13 03:27:57 crc kubenswrapper[5070]: I1213 03:27:57.628150 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-9z5l6" Dec 13 03:27:57 crc kubenswrapper[5070]: I1213 03:27:57.628806 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc" Dec 13 03:27:57 crc kubenswrapper[5070]: I1213 03:27:57.636177 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle" Dec 13 03:27:57 crc kubenswrapper[5070]: I1213 03:27:57.648124 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Dec 13 03:27:57 crc kubenswrapper[5070]: I1213 03:27:57.715818 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"openstack-galera-0\" (UID: \"df672553-0b7d-4554-9ff6-c5d9a674dffd\") " pod="openstack/openstack-galera-0" Dec 13 03:27:57 crc kubenswrapper[5070]: I1213 03:27:57.715865 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/df672553-0b7d-4554-9ff6-c5d9a674dffd-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"df672553-0b7d-4554-9ff6-c5d9a674dffd\") " pod="openstack/openstack-galera-0" Dec 13 03:27:57 crc kubenswrapper[5070]: I1213 03:27:57.715893 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/df672553-0b7d-4554-9ff6-c5d9a674dffd-config-data-default\") pod \"openstack-galera-0\" (UID: \"df672553-0b7d-4554-9ff6-c5d9a674dffd\") " pod="openstack/openstack-galera-0" Dec 13 03:27:57 crc kubenswrapper[5070]: I1213 03:27:57.715920 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7t2dc\" (UniqueName: \"kubernetes.io/projected/df672553-0b7d-4554-9ff6-c5d9a674dffd-kube-api-access-7t2dc\") pod \"openstack-galera-0\" (UID: \"df672553-0b7d-4554-9ff6-c5d9a674dffd\") " pod="openstack/openstack-galera-0" Dec 13 03:27:57 crc kubenswrapper[5070]: I1213 03:27:57.715943 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/df672553-0b7d-4554-9ff6-c5d9a674dffd-operator-scripts\") pod \"openstack-galera-0\" (UID: \"df672553-0b7d-4554-9ff6-c5d9a674dffd\") " pod="openstack/openstack-galera-0" Dec 13 03:27:57 crc kubenswrapper[5070]: I1213 03:27:57.715961 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/df672553-0b7d-4554-9ff6-c5d9a674dffd-secrets\") pod \"openstack-galera-0\" (UID: \"df672553-0b7d-4554-9ff6-c5d9a674dffd\") " pod="openstack/openstack-galera-0" Dec 13 03:27:57 crc 
kubenswrapper[5070]: I1213 03:27:57.716153 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/df672553-0b7d-4554-9ff6-c5d9a674dffd-config-data-generated\") pod \"openstack-galera-0\" (UID: \"df672553-0b7d-4554-9ff6-c5d9a674dffd\") " pod="openstack/openstack-galera-0" Dec 13 03:27:57 crc kubenswrapper[5070]: I1213 03:27:57.716257 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/df672553-0b7d-4554-9ff6-c5d9a674dffd-kolla-config\") pod \"openstack-galera-0\" (UID: \"df672553-0b7d-4554-9ff6-c5d9a674dffd\") " pod="openstack/openstack-galera-0" Dec 13 03:27:57 crc kubenswrapper[5070]: I1213 03:27:57.716336 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/df672553-0b7d-4554-9ff6-c5d9a674dffd-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"df672553-0b7d-4554-9ff6-c5d9a674dffd\") " pod="openstack/openstack-galera-0" Dec 13 03:27:57 crc kubenswrapper[5070]: I1213 03:27:57.817732 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"openstack-galera-0\" (UID: \"df672553-0b7d-4554-9ff6-c5d9a674dffd\") " pod="openstack/openstack-galera-0" Dec 13 03:27:57 crc kubenswrapper[5070]: I1213 03:27:57.817787 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/df672553-0b7d-4554-9ff6-c5d9a674dffd-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"df672553-0b7d-4554-9ff6-c5d9a674dffd\") " pod="openstack/openstack-galera-0" Dec 13 03:27:57 crc kubenswrapper[5070]: I1213 03:27:57.817821 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/df672553-0b7d-4554-9ff6-c5d9a674dffd-config-data-default\") pod \"openstack-galera-0\" (UID: \"df672553-0b7d-4554-9ff6-c5d9a674dffd\") " pod="openstack/openstack-galera-0" Dec 13 03:27:57 crc kubenswrapper[5070]: I1213 03:27:57.817858 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7t2dc\" (UniqueName: \"kubernetes.io/projected/df672553-0b7d-4554-9ff6-c5d9a674dffd-kube-api-access-7t2dc\") pod \"openstack-galera-0\" (UID: \"df672553-0b7d-4554-9ff6-c5d9a674dffd\") " pod="openstack/openstack-galera-0" Dec 13 03:27:57 crc kubenswrapper[5070]: I1213 03:27:57.817893 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/df672553-0b7d-4554-9ff6-c5d9a674dffd-operator-scripts\") pod \"openstack-galera-0\" (UID: \"df672553-0b7d-4554-9ff6-c5d9a674dffd\") " pod="openstack/openstack-galera-0" Dec 13 03:27:57 crc kubenswrapper[5070]: I1213 03:27:57.817922 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/df672553-0b7d-4554-9ff6-c5d9a674dffd-secrets\") pod \"openstack-galera-0\" (UID: \"df672553-0b7d-4554-9ff6-c5d9a674dffd\") " pod="openstack/openstack-galera-0" Dec 13 03:27:57 crc kubenswrapper[5070]: I1213 03:27:57.817948 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: 
\"kubernetes.io/empty-dir/df672553-0b7d-4554-9ff6-c5d9a674dffd-config-data-generated\") pod \"openstack-galera-0\" (UID: \"df672553-0b7d-4554-9ff6-c5d9a674dffd\") " pod="openstack/openstack-galera-0" Dec 13 03:27:57 crc kubenswrapper[5070]: I1213 03:27:57.817975 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/df672553-0b7d-4554-9ff6-c5d9a674dffd-kolla-config\") pod \"openstack-galera-0\" (UID: \"df672553-0b7d-4554-9ff6-c5d9a674dffd\") " pod="openstack/openstack-galera-0" Dec 13 03:27:57 crc kubenswrapper[5070]: I1213 03:27:57.818004 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/df672553-0b7d-4554-9ff6-c5d9a674dffd-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"df672553-0b7d-4554-9ff6-c5d9a674dffd\") " pod="openstack/openstack-galera-0" Dec 13 03:27:57 crc kubenswrapper[5070]: I1213 03:27:57.818132 5070 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"openstack-galera-0\" (UID: \"df672553-0b7d-4554-9ff6-c5d9a674dffd\") device mount path \"/mnt/openstack/pv06\"" pod="openstack/openstack-galera-0" Dec 13 03:27:57 crc kubenswrapper[5070]: I1213 03:27:57.818875 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/df672553-0b7d-4554-9ff6-c5d9a674dffd-config-data-generated\") pod \"openstack-galera-0\" (UID: \"df672553-0b7d-4554-9ff6-c5d9a674dffd\") " pod="openstack/openstack-galera-0" Dec 13 03:27:57 crc kubenswrapper[5070]: I1213 03:27:57.819509 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/df672553-0b7d-4554-9ff6-c5d9a674dffd-config-data-default\") pod \"openstack-galera-0\" (UID: \"df672553-0b7d-4554-9ff6-c5d9a674dffd\") " pod="openstack/openstack-galera-0" Dec 13 03:27:57 crc kubenswrapper[5070]: I1213 03:27:57.819623 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/df672553-0b7d-4554-9ff6-c5d9a674dffd-kolla-config\") pod \"openstack-galera-0\" (UID: \"df672553-0b7d-4554-9ff6-c5d9a674dffd\") " pod="openstack/openstack-galera-0" Dec 13 03:27:57 crc kubenswrapper[5070]: I1213 03:27:57.821378 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/df672553-0b7d-4554-9ff6-c5d9a674dffd-operator-scripts\") pod \"openstack-galera-0\" (UID: \"df672553-0b7d-4554-9ff6-c5d9a674dffd\") " pod="openstack/openstack-galera-0" Dec 13 03:27:57 crc kubenswrapper[5070]: I1213 03:27:57.825902 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/df672553-0b7d-4554-9ff6-c5d9a674dffd-secrets\") pod \"openstack-galera-0\" (UID: \"df672553-0b7d-4554-9ff6-c5d9a674dffd\") " pod="openstack/openstack-galera-0" Dec 13 03:27:57 crc kubenswrapper[5070]: I1213 03:27:57.826357 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/df672553-0b7d-4554-9ff6-c5d9a674dffd-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"df672553-0b7d-4554-9ff6-c5d9a674dffd\") " pod="openstack/openstack-galera-0" Dec 13 03:27:57 crc kubenswrapper[5070]: I1213 
03:27:57.827111 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/df672553-0b7d-4554-9ff6-c5d9a674dffd-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"df672553-0b7d-4554-9ff6-c5d9a674dffd\") " pod="openstack/openstack-galera-0" Dec 13 03:27:57 crc kubenswrapper[5070]: I1213 03:27:57.842145 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7t2dc\" (UniqueName: \"kubernetes.io/projected/df672553-0b7d-4554-9ff6-c5d9a674dffd-kube-api-access-7t2dc\") pod \"openstack-galera-0\" (UID: \"df672553-0b7d-4554-9ff6-c5d9a674dffd\") " pod="openstack/openstack-galera-0" Dec 13 03:27:57 crc kubenswrapper[5070]: I1213 03:27:57.848427 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"openstack-galera-0\" (UID: \"df672553-0b7d-4554-9ff6-c5d9a674dffd\") " pod="openstack/openstack-galera-0" Dec 13 03:27:57 crc kubenswrapper[5070]: I1213 03:27:57.937063 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Dec 13 03:27:58 crc kubenswrapper[5070]: I1213 03:27:58.254630 5070 patch_prober.go:28] interesting pod/authentication-operator-69f744f599-k6vpl container/authentication-operator namespace/openshift-authentication-operator: Liveness probe status=failure output="Get \"https://10.217.0.17:8443/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 13 03:27:58 crc kubenswrapper[5070]: I1213 03:27:58.255312 5070 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-authentication-operator/authentication-operator-69f744f599-k6vpl" podUID="bbdcfa81-b48d-4067-af2e-0de54cea8c7e" containerName="authentication-operator" probeResult="failure" output="Get \"https://10.217.0.17:8443/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 13 03:27:58 crc kubenswrapper[5070]: W1213 03:27:58.436849 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddf672553_0b7d_4554_9ff6_c5d9a674dffd.slice/crio-5039e67c0a32e6f21f2796c761a96592900ef99168c767b6bc82ec3e39b6293d WatchSource:0}: Error finding container 5039e67c0a32e6f21f2796c761a96592900ef99168c767b6bc82ec3e39b6293d: Status 404 returned error can't find the container with id 5039e67c0a32e6f21f2796c761a96592900ef99168c767b6bc82ec3e39b6293d Dec 13 03:27:58 crc kubenswrapper[5070]: I1213 03:27:58.437957 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Dec 13 03:27:59 crc kubenswrapper[5070]: I1213 03:27:59.028950 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 13 03:27:59 crc kubenswrapper[5070]: I1213 03:27:59.030162 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-cell1-galera-0" Dec 13 03:27:59 crc kubenswrapper[5070]: I1213 03:27:59.036336 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-7f5lc" Dec 13 03:27:59 crc kubenswrapper[5070]: I1213 03:27:59.037838 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data" Dec 13 03:27:59 crc kubenswrapper[5070]: I1213 03:27:59.038048 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts" Dec 13 03:27:59 crc kubenswrapper[5070]: I1213 03:27:59.038921 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc" Dec 13 03:27:59 crc kubenswrapper[5070]: I1213 03:27:59.041678 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 13 03:27:59 crc kubenswrapper[5070]: I1213 03:27:59.142045 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/9dd33a5a-3305-45ff-a544-0bae02032d8f-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"9dd33a5a-3305-45ff-a544-0bae02032d8f\") " pod="openstack/openstack-cell1-galera-0" Dec 13 03:27:59 crc kubenswrapper[5070]: I1213 03:27:59.142309 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/9dd33a5a-3305-45ff-a544-0bae02032d8f-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"9dd33a5a-3305-45ff-a544-0bae02032d8f\") " pod="openstack/openstack-cell1-galera-0" Dec 13 03:27:59 crc kubenswrapper[5070]: I1213 03:27:59.142333 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/9dd33a5a-3305-45ff-a544-0bae02032d8f-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"9dd33a5a-3305-45ff-a544-0bae02032d8f\") " pod="openstack/openstack-cell1-galera-0" Dec 13 03:27:59 crc kubenswrapper[5070]: I1213 03:27:59.142372 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/9dd33a5a-3305-45ff-a544-0bae02032d8f-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"9dd33a5a-3305-45ff-a544-0bae02032d8f\") " pod="openstack/openstack-cell1-galera-0" Dec 13 03:27:59 crc kubenswrapper[5070]: I1213 03:27:59.142390 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4p789\" (UniqueName: \"kubernetes.io/projected/9dd33a5a-3305-45ff-a544-0bae02032d8f-kube-api-access-4p789\") pod \"openstack-cell1-galera-0\" (UID: \"9dd33a5a-3305-45ff-a544-0bae02032d8f\") " pod="openstack/openstack-cell1-galera-0" Dec 13 03:27:59 crc kubenswrapper[5070]: I1213 03:27:59.142436 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-cell1-galera-0\" (UID: \"9dd33a5a-3305-45ff-a544-0bae02032d8f\") " pod="openstack/openstack-cell1-galera-0" Dec 13 03:27:59 crc kubenswrapper[5070]: I1213 03:27:59.142465 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: 
\"kubernetes.io/empty-dir/9dd33a5a-3305-45ff-a544-0bae02032d8f-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"9dd33a5a-3305-45ff-a544-0bae02032d8f\") " pod="openstack/openstack-cell1-galera-0" Dec 13 03:27:59 crc kubenswrapper[5070]: I1213 03:27:59.142481 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9dd33a5a-3305-45ff-a544-0bae02032d8f-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"9dd33a5a-3305-45ff-a544-0bae02032d8f\") " pod="openstack/openstack-cell1-galera-0" Dec 13 03:27:59 crc kubenswrapper[5070]: I1213 03:27:59.142519 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9dd33a5a-3305-45ff-a544-0bae02032d8f-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"9dd33a5a-3305-45ff-a544-0bae02032d8f\") " pod="openstack/openstack-cell1-galera-0" Dec 13 03:27:59 crc kubenswrapper[5070]: I1213 03:27:59.243889 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-cell1-galera-0\" (UID: \"9dd33a5a-3305-45ff-a544-0bae02032d8f\") " pod="openstack/openstack-cell1-galera-0" Dec 13 03:27:59 crc kubenswrapper[5070]: I1213 03:27:59.243955 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/9dd33a5a-3305-45ff-a544-0bae02032d8f-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"9dd33a5a-3305-45ff-a544-0bae02032d8f\") " pod="openstack/openstack-cell1-galera-0" Dec 13 03:27:59 crc kubenswrapper[5070]: I1213 03:27:59.243980 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9dd33a5a-3305-45ff-a544-0bae02032d8f-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"9dd33a5a-3305-45ff-a544-0bae02032d8f\") " pod="openstack/openstack-cell1-galera-0" Dec 13 03:27:59 crc kubenswrapper[5070]: I1213 03:27:59.244020 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9dd33a5a-3305-45ff-a544-0bae02032d8f-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"9dd33a5a-3305-45ff-a544-0bae02032d8f\") " pod="openstack/openstack-cell1-galera-0" Dec 13 03:27:59 crc kubenswrapper[5070]: I1213 03:27:59.244074 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/9dd33a5a-3305-45ff-a544-0bae02032d8f-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"9dd33a5a-3305-45ff-a544-0bae02032d8f\") " pod="openstack/openstack-cell1-galera-0" Dec 13 03:27:59 crc kubenswrapper[5070]: I1213 03:27:59.244091 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/9dd33a5a-3305-45ff-a544-0bae02032d8f-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"9dd33a5a-3305-45ff-a544-0bae02032d8f\") " pod="openstack/openstack-cell1-galera-0" Dec 13 03:27:59 crc kubenswrapper[5070]: I1213 03:27:59.244113 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/9dd33a5a-3305-45ff-a544-0bae02032d8f-galera-tls-certs\") pod 
\"openstack-cell1-galera-0\" (UID: \"9dd33a5a-3305-45ff-a544-0bae02032d8f\") " pod="openstack/openstack-cell1-galera-0" Dec 13 03:27:59 crc kubenswrapper[5070]: I1213 03:27:59.244154 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/9dd33a5a-3305-45ff-a544-0bae02032d8f-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"9dd33a5a-3305-45ff-a544-0bae02032d8f\") " pod="openstack/openstack-cell1-galera-0" Dec 13 03:27:59 crc kubenswrapper[5070]: I1213 03:27:59.244171 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4p789\" (UniqueName: \"kubernetes.io/projected/9dd33a5a-3305-45ff-a544-0bae02032d8f-kube-api-access-4p789\") pod \"openstack-cell1-galera-0\" (UID: \"9dd33a5a-3305-45ff-a544-0bae02032d8f\") " pod="openstack/openstack-cell1-galera-0" Dec 13 03:27:59 crc kubenswrapper[5070]: I1213 03:27:59.244286 5070 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-cell1-galera-0\" (UID: \"9dd33a5a-3305-45ff-a544-0bae02032d8f\") device mount path \"/mnt/openstack/pv02\"" pod="openstack/openstack-cell1-galera-0" Dec 13 03:27:59 crc kubenswrapper[5070]: I1213 03:27:59.246201 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/9dd33a5a-3305-45ff-a544-0bae02032d8f-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"9dd33a5a-3305-45ff-a544-0bae02032d8f\") " pod="openstack/openstack-cell1-galera-0" Dec 13 03:27:59 crc kubenswrapper[5070]: I1213 03:27:59.251436 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/9dd33a5a-3305-45ff-a544-0bae02032d8f-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"9dd33a5a-3305-45ff-a544-0bae02032d8f\") " pod="openstack/openstack-cell1-galera-0" Dec 13 03:27:59 crc kubenswrapper[5070]: I1213 03:27:59.251769 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/9dd33a5a-3305-45ff-a544-0bae02032d8f-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"9dd33a5a-3305-45ff-a544-0bae02032d8f\") " pod="openstack/openstack-cell1-galera-0" Dec 13 03:27:59 crc kubenswrapper[5070]: I1213 03:27:59.253533 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9dd33a5a-3305-45ff-a544-0bae02032d8f-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"9dd33a5a-3305-45ff-a544-0bae02032d8f\") " pod="openstack/openstack-cell1-galera-0" Dec 13 03:27:59 crc kubenswrapper[5070]: I1213 03:27:59.258145 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/9dd33a5a-3305-45ff-a544-0bae02032d8f-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"9dd33a5a-3305-45ff-a544-0bae02032d8f\") " pod="openstack/openstack-cell1-galera-0" Dec 13 03:27:59 crc kubenswrapper[5070]: I1213 03:27:59.269263 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/9dd33a5a-3305-45ff-a544-0bae02032d8f-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"9dd33a5a-3305-45ff-a544-0bae02032d8f\") " pod="openstack/openstack-cell1-galera-0" Dec 13 
03:27:59 crc kubenswrapper[5070]: I1213 03:27:59.269270 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9dd33a5a-3305-45ff-a544-0bae02032d8f-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"9dd33a5a-3305-45ff-a544-0bae02032d8f\") " pod="openstack/openstack-cell1-galera-0" Dec 13 03:27:59 crc kubenswrapper[5070]: I1213 03:27:59.271745 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4p789\" (UniqueName: \"kubernetes.io/projected/9dd33a5a-3305-45ff-a544-0bae02032d8f-kube-api-access-4p789\") pod \"openstack-cell1-galera-0\" (UID: \"9dd33a5a-3305-45ff-a544-0bae02032d8f\") " pod="openstack/openstack-cell1-galera-0" Dec 13 03:27:59 crc kubenswrapper[5070]: I1213 03:27:59.303518 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-cell1-galera-0\" (UID: \"9dd33a5a-3305-45ff-a544-0bae02032d8f\") " pod="openstack/openstack-cell1-galera-0" Dec 13 03:27:59 crc kubenswrapper[5070]: I1213 03:27:59.318317 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"] Dec 13 03:27:59 crc kubenswrapper[5070]: I1213 03:27:59.319556 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Dec 13 03:27:59 crc kubenswrapper[5070]: I1213 03:27:59.321311 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-memcached-svc" Dec 13 03:27:59 crc kubenswrapper[5070]: I1213 03:27:59.324152 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data" Dec 13 03:27:59 crc kubenswrapper[5070]: I1213 03:27:59.327353 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-gppm7" Dec 13 03:27:59 crc kubenswrapper[5070]: I1213 03:27:59.337842 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Dec 13 03:27:59 crc kubenswrapper[5070]: I1213 03:27:59.357244 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-cell1-galera-0" Dec 13 03:27:59 crc kubenswrapper[5070]: I1213 03:27:59.400731 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"df672553-0b7d-4554-9ff6-c5d9a674dffd","Type":"ContainerStarted","Data":"5039e67c0a32e6f21f2796c761a96592900ef99168c767b6bc82ec3e39b6293d"} Dec 13 03:27:59 crc kubenswrapper[5070]: I1213 03:27:59.453788 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c8cecacd-8f32-41ea-a0bb-1b1cab38ad2b-config-data\") pod \"memcached-0\" (UID: \"c8cecacd-8f32-41ea-a0bb-1b1cab38ad2b\") " pod="openstack/memcached-0" Dec 13 03:27:59 crc kubenswrapper[5070]: I1213 03:27:59.453848 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/c8cecacd-8f32-41ea-a0bb-1b1cab38ad2b-kolla-config\") pod \"memcached-0\" (UID: \"c8cecacd-8f32-41ea-a0bb-1b1cab38ad2b\") " pod="openstack/memcached-0" Dec 13 03:27:59 crc kubenswrapper[5070]: I1213 03:27:59.453893 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/c8cecacd-8f32-41ea-a0bb-1b1cab38ad2b-memcached-tls-certs\") pod \"memcached-0\" (UID: \"c8cecacd-8f32-41ea-a0bb-1b1cab38ad2b\") " pod="openstack/memcached-0" Dec 13 03:27:59 crc kubenswrapper[5070]: I1213 03:27:59.453946 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4cs8t\" (UniqueName: \"kubernetes.io/projected/c8cecacd-8f32-41ea-a0bb-1b1cab38ad2b-kube-api-access-4cs8t\") pod \"memcached-0\" (UID: \"c8cecacd-8f32-41ea-a0bb-1b1cab38ad2b\") " pod="openstack/memcached-0" Dec 13 03:27:59 crc kubenswrapper[5070]: I1213 03:27:59.453999 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c8cecacd-8f32-41ea-a0bb-1b1cab38ad2b-combined-ca-bundle\") pod \"memcached-0\" (UID: \"c8cecacd-8f32-41ea-a0bb-1b1cab38ad2b\") " pod="openstack/memcached-0" Dec 13 03:27:59 crc kubenswrapper[5070]: I1213 03:27:59.555680 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c8cecacd-8f32-41ea-a0bb-1b1cab38ad2b-combined-ca-bundle\") pod \"memcached-0\" (UID: \"c8cecacd-8f32-41ea-a0bb-1b1cab38ad2b\") " pod="openstack/memcached-0" Dec 13 03:27:59 crc kubenswrapper[5070]: I1213 03:27:59.555733 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c8cecacd-8f32-41ea-a0bb-1b1cab38ad2b-config-data\") pod \"memcached-0\" (UID: \"c8cecacd-8f32-41ea-a0bb-1b1cab38ad2b\") " pod="openstack/memcached-0" Dec 13 03:27:59 crc kubenswrapper[5070]: I1213 03:27:59.555763 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/c8cecacd-8f32-41ea-a0bb-1b1cab38ad2b-kolla-config\") pod \"memcached-0\" (UID: \"c8cecacd-8f32-41ea-a0bb-1b1cab38ad2b\") " pod="openstack/memcached-0" Dec 13 03:27:59 crc kubenswrapper[5070]: I1213 03:27:59.555796 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/c8cecacd-8f32-41ea-a0bb-1b1cab38ad2b-memcached-tls-certs\") pod \"memcached-0\" (UID: \"c8cecacd-8f32-41ea-a0bb-1b1cab38ad2b\") " pod="openstack/memcached-0" Dec 13 03:27:59 crc kubenswrapper[5070]: I1213 03:27:59.555840 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4cs8t\" (UniqueName: \"kubernetes.io/projected/c8cecacd-8f32-41ea-a0bb-1b1cab38ad2b-kube-api-access-4cs8t\") pod \"memcached-0\" (UID: \"c8cecacd-8f32-41ea-a0bb-1b1cab38ad2b\") " pod="openstack/memcached-0" Dec 13 03:27:59 crc kubenswrapper[5070]: I1213 03:27:59.557640 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/c8cecacd-8f32-41ea-a0bb-1b1cab38ad2b-kolla-config\") pod \"memcached-0\" (UID: \"c8cecacd-8f32-41ea-a0bb-1b1cab38ad2b\") " pod="openstack/memcached-0" Dec 13 03:27:59 crc kubenswrapper[5070]: I1213 03:27:59.557646 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c8cecacd-8f32-41ea-a0bb-1b1cab38ad2b-config-data\") pod \"memcached-0\" (UID: \"c8cecacd-8f32-41ea-a0bb-1b1cab38ad2b\") " pod="openstack/memcached-0" Dec 13 03:27:59 crc kubenswrapper[5070]: I1213 03:27:59.560425 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/c8cecacd-8f32-41ea-a0bb-1b1cab38ad2b-memcached-tls-certs\") pod \"memcached-0\" (UID: \"c8cecacd-8f32-41ea-a0bb-1b1cab38ad2b\") " pod="openstack/memcached-0" Dec 13 03:27:59 crc kubenswrapper[5070]: I1213 03:27:59.560931 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c8cecacd-8f32-41ea-a0bb-1b1cab38ad2b-combined-ca-bundle\") pod \"memcached-0\" (UID: \"c8cecacd-8f32-41ea-a0bb-1b1cab38ad2b\") " pod="openstack/memcached-0" Dec 13 03:27:59 crc kubenswrapper[5070]: I1213 03:27:59.575017 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4cs8t\" (UniqueName: \"kubernetes.io/projected/c8cecacd-8f32-41ea-a0bb-1b1cab38ad2b-kube-api-access-4cs8t\") pod \"memcached-0\" (UID: \"c8cecacd-8f32-41ea-a0bb-1b1cab38ad2b\") " pod="openstack/memcached-0" Dec 13 03:27:59 crc kubenswrapper[5070]: I1213 03:27:59.663740 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Dec 13 03:28:00 crc kubenswrapper[5070]: I1213 03:28:00.947612 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Dec 13 03:28:00 crc kubenswrapper[5070]: I1213 03:28:00.948848 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 13 03:28:00 crc kubenswrapper[5070]: I1213 03:28:00.951431 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-sjtgt" Dec 13 03:28:00 crc kubenswrapper[5070]: I1213 03:28:00.972747 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 13 03:28:01 crc kubenswrapper[5070]: I1213 03:28:01.128166 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h48x7\" (UniqueName: \"kubernetes.io/projected/691f47eb-c110-4423-8dac-5515ac7306e0-kube-api-access-h48x7\") pod \"kube-state-metrics-0\" (UID: \"691f47eb-c110-4423-8dac-5515ac7306e0\") " pod="openstack/kube-state-metrics-0" Dec 13 03:28:01 crc kubenswrapper[5070]: I1213 03:28:01.230106 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h48x7\" (UniqueName: \"kubernetes.io/projected/691f47eb-c110-4423-8dac-5515ac7306e0-kube-api-access-h48x7\") pod \"kube-state-metrics-0\" (UID: \"691f47eb-c110-4423-8dac-5515ac7306e0\") " pod="openstack/kube-state-metrics-0" Dec 13 03:28:01 crc kubenswrapper[5070]: I1213 03:28:01.247382 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h48x7\" (UniqueName: \"kubernetes.io/projected/691f47eb-c110-4423-8dac-5515ac7306e0-kube-api-access-h48x7\") pod \"kube-state-metrics-0\" (UID: \"691f47eb-c110-4423-8dac-5515ac7306e0\") " pod="openstack/kube-state-metrics-0" Dec 13 03:28:01 crc kubenswrapper[5070]: I1213 03:28:01.269835 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 13 03:28:03 crc kubenswrapper[5070]: I1213 03:28:03.925502 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-x4xfl"] Dec 13 03:28:03 crc kubenswrapper[5070]: I1213 03:28:03.926642 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-x4xfl" Dec 13 03:28:03 crc kubenswrapper[5070]: I1213 03:28:03.928975 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncontroller-ovncontroller-dockercfg-7gdwn" Dec 13 03:28:03 crc kubenswrapper[5070]: I1213 03:28:03.930161 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-scripts" Dec 13 03:28:03 crc kubenswrapper[5070]: I1213 03:28:03.931145 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovncontroller-ovndbs" Dec 13 03:28:03 crc kubenswrapper[5070]: I1213 03:28:03.938281 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-x4xfl"] Dec 13 03:28:03 crc kubenswrapper[5070]: I1213 03:28:03.954523 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-ovs-zmbl5"] Dec 13 03:28:03 crc kubenswrapper[5070]: I1213 03:28:03.956029 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-ovs-zmbl5" Dec 13 03:28:03 crc kubenswrapper[5070]: I1213 03:28:03.964361 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-zmbl5"] Dec 13 03:28:04 crc kubenswrapper[5070]: I1213 03:28:04.078507 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-29c56\" (UniqueName: \"kubernetes.io/projected/5f8ec385-4f69-4645-95d2-2d854b3fac57-kube-api-access-29c56\") pod \"ovn-controller-ovs-zmbl5\" (UID: \"5f8ec385-4f69-4645-95d2-2d854b3fac57\") " pod="openstack/ovn-controller-ovs-zmbl5" Dec 13 03:28:04 crc kubenswrapper[5070]: I1213 03:28:04.078589 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/fbcbdef3-6b6e-442e-9a5a-3bc14faf3be4-var-run\") pod \"ovn-controller-x4xfl\" (UID: \"fbcbdef3-6b6e-442e-9a5a-3bc14faf3be4\") " pod="openstack/ovn-controller-x4xfl" Dec 13 03:28:04 crc kubenswrapper[5070]: I1213 03:28:04.078617 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fbcbdef3-6b6e-442e-9a5a-3bc14faf3be4-scripts\") pod \"ovn-controller-x4xfl\" (UID: \"fbcbdef3-6b6e-442e-9a5a-3bc14faf3be4\") " pod="openstack/ovn-controller-x4xfl" Dec 13 03:28:04 crc kubenswrapper[5070]: I1213 03:28:04.078635 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/5f8ec385-4f69-4645-95d2-2d854b3fac57-var-lib\") pod \"ovn-controller-ovs-zmbl5\" (UID: \"5f8ec385-4f69-4645-95d2-2d854b3fac57\") " pod="openstack/ovn-controller-ovs-zmbl5" Dec 13 03:28:04 crc kubenswrapper[5070]: I1213 03:28:04.078654 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/fbcbdef3-6b6e-442e-9a5a-3bc14faf3be4-var-log-ovn\") pod \"ovn-controller-x4xfl\" (UID: \"fbcbdef3-6b6e-442e-9a5a-3bc14faf3be4\") " pod="openstack/ovn-controller-x4xfl" Dec 13 03:28:04 crc kubenswrapper[5070]: I1213 03:28:04.078687 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/fbcbdef3-6b6e-442e-9a5a-3bc14faf3be4-ovn-controller-tls-certs\") pod \"ovn-controller-x4xfl\" (UID: \"fbcbdef3-6b6e-442e-9a5a-3bc14faf3be4\") " pod="openstack/ovn-controller-x4xfl" Dec 13 03:28:04 crc kubenswrapper[5070]: I1213 03:28:04.078703 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/5f8ec385-4f69-4645-95d2-2d854b3fac57-var-run\") pod \"ovn-controller-ovs-zmbl5\" (UID: \"5f8ec385-4f69-4645-95d2-2d854b3fac57\") " pod="openstack/ovn-controller-ovs-zmbl5" Dec 13 03:28:04 crc kubenswrapper[5070]: I1213 03:28:04.078759 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/5f8ec385-4f69-4645-95d2-2d854b3fac57-etc-ovs\") pod \"ovn-controller-ovs-zmbl5\" (UID: \"5f8ec385-4f69-4645-95d2-2d854b3fac57\") " pod="openstack/ovn-controller-ovs-zmbl5" Dec 13 03:28:04 crc kubenswrapper[5070]: I1213 03:28:04.078774 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/fbcbdef3-6b6e-442e-9a5a-3bc14faf3be4-combined-ca-bundle\") pod \"ovn-controller-x4xfl\" (UID: \"fbcbdef3-6b6e-442e-9a5a-3bc14faf3be4\") " pod="openstack/ovn-controller-x4xfl" Dec 13 03:28:04 crc kubenswrapper[5070]: I1213 03:28:04.078800 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-txbxm\" (UniqueName: \"kubernetes.io/projected/fbcbdef3-6b6e-442e-9a5a-3bc14faf3be4-kube-api-access-txbxm\") pod \"ovn-controller-x4xfl\" (UID: \"fbcbdef3-6b6e-442e-9a5a-3bc14faf3be4\") " pod="openstack/ovn-controller-x4xfl" Dec 13 03:28:04 crc kubenswrapper[5070]: I1213 03:28:04.078828 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/5f8ec385-4f69-4645-95d2-2d854b3fac57-var-log\") pod \"ovn-controller-ovs-zmbl5\" (UID: \"5f8ec385-4f69-4645-95d2-2d854b3fac57\") " pod="openstack/ovn-controller-ovs-zmbl5" Dec 13 03:28:04 crc kubenswrapper[5070]: I1213 03:28:04.078863 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5f8ec385-4f69-4645-95d2-2d854b3fac57-scripts\") pod \"ovn-controller-ovs-zmbl5\" (UID: \"5f8ec385-4f69-4645-95d2-2d854b3fac57\") " pod="openstack/ovn-controller-ovs-zmbl5" Dec 13 03:28:04 crc kubenswrapper[5070]: I1213 03:28:04.078886 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/fbcbdef3-6b6e-442e-9a5a-3bc14faf3be4-var-run-ovn\") pod \"ovn-controller-x4xfl\" (UID: \"fbcbdef3-6b6e-442e-9a5a-3bc14faf3be4\") " pod="openstack/ovn-controller-x4xfl" Dec 13 03:28:04 crc kubenswrapper[5070]: I1213 03:28:04.179746 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/fbcbdef3-6b6e-442e-9a5a-3bc14faf3be4-var-log-ovn\") pod \"ovn-controller-x4xfl\" (UID: \"fbcbdef3-6b6e-442e-9a5a-3bc14faf3be4\") " pod="openstack/ovn-controller-x4xfl" Dec 13 03:28:04 crc kubenswrapper[5070]: I1213 03:28:04.179791 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/fbcbdef3-6b6e-442e-9a5a-3bc14faf3be4-ovn-controller-tls-certs\") pod \"ovn-controller-x4xfl\" (UID: \"fbcbdef3-6b6e-442e-9a5a-3bc14faf3be4\") " pod="openstack/ovn-controller-x4xfl" Dec 13 03:28:04 crc kubenswrapper[5070]: I1213 03:28:04.179822 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/5f8ec385-4f69-4645-95d2-2d854b3fac57-var-run\") pod \"ovn-controller-ovs-zmbl5\" (UID: \"5f8ec385-4f69-4645-95d2-2d854b3fac57\") " pod="openstack/ovn-controller-ovs-zmbl5" Dec 13 03:28:04 crc kubenswrapper[5070]: I1213 03:28:04.179895 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/5f8ec385-4f69-4645-95d2-2d854b3fac57-etc-ovs\") pod \"ovn-controller-ovs-zmbl5\" (UID: \"5f8ec385-4f69-4645-95d2-2d854b3fac57\") " pod="openstack/ovn-controller-ovs-zmbl5" Dec 13 03:28:04 crc kubenswrapper[5070]: I1213 03:28:04.179917 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbcbdef3-6b6e-442e-9a5a-3bc14faf3be4-combined-ca-bundle\") pod \"ovn-controller-x4xfl\" (UID: 
\"fbcbdef3-6b6e-442e-9a5a-3bc14faf3be4\") " pod="openstack/ovn-controller-x4xfl" Dec 13 03:28:04 crc kubenswrapper[5070]: I1213 03:28:04.179948 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-txbxm\" (UniqueName: \"kubernetes.io/projected/fbcbdef3-6b6e-442e-9a5a-3bc14faf3be4-kube-api-access-txbxm\") pod \"ovn-controller-x4xfl\" (UID: \"fbcbdef3-6b6e-442e-9a5a-3bc14faf3be4\") " pod="openstack/ovn-controller-x4xfl" Dec 13 03:28:04 crc kubenswrapper[5070]: I1213 03:28:04.179977 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/5f8ec385-4f69-4645-95d2-2d854b3fac57-var-log\") pod \"ovn-controller-ovs-zmbl5\" (UID: \"5f8ec385-4f69-4645-95d2-2d854b3fac57\") " pod="openstack/ovn-controller-ovs-zmbl5" Dec 13 03:28:04 crc kubenswrapper[5070]: I1213 03:28:04.180002 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5f8ec385-4f69-4645-95d2-2d854b3fac57-scripts\") pod \"ovn-controller-ovs-zmbl5\" (UID: \"5f8ec385-4f69-4645-95d2-2d854b3fac57\") " pod="openstack/ovn-controller-ovs-zmbl5" Dec 13 03:28:04 crc kubenswrapper[5070]: I1213 03:28:04.180024 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/fbcbdef3-6b6e-442e-9a5a-3bc14faf3be4-var-run-ovn\") pod \"ovn-controller-x4xfl\" (UID: \"fbcbdef3-6b6e-442e-9a5a-3bc14faf3be4\") " pod="openstack/ovn-controller-x4xfl" Dec 13 03:28:04 crc kubenswrapper[5070]: I1213 03:28:04.180055 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-29c56\" (UniqueName: \"kubernetes.io/projected/5f8ec385-4f69-4645-95d2-2d854b3fac57-kube-api-access-29c56\") pod \"ovn-controller-ovs-zmbl5\" (UID: \"5f8ec385-4f69-4645-95d2-2d854b3fac57\") " pod="openstack/ovn-controller-ovs-zmbl5" Dec 13 03:28:04 crc kubenswrapper[5070]: I1213 03:28:04.180078 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/fbcbdef3-6b6e-442e-9a5a-3bc14faf3be4-var-run\") pod \"ovn-controller-x4xfl\" (UID: \"fbcbdef3-6b6e-442e-9a5a-3bc14faf3be4\") " pod="openstack/ovn-controller-x4xfl" Dec 13 03:28:04 crc kubenswrapper[5070]: I1213 03:28:04.180106 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fbcbdef3-6b6e-442e-9a5a-3bc14faf3be4-scripts\") pod \"ovn-controller-x4xfl\" (UID: \"fbcbdef3-6b6e-442e-9a5a-3bc14faf3be4\") " pod="openstack/ovn-controller-x4xfl" Dec 13 03:28:04 crc kubenswrapper[5070]: I1213 03:28:04.180128 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/5f8ec385-4f69-4645-95d2-2d854b3fac57-var-lib\") pod \"ovn-controller-ovs-zmbl5\" (UID: \"5f8ec385-4f69-4645-95d2-2d854b3fac57\") " pod="openstack/ovn-controller-ovs-zmbl5" Dec 13 03:28:04 crc kubenswrapper[5070]: I1213 03:28:04.180281 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/fbcbdef3-6b6e-442e-9a5a-3bc14faf3be4-var-log-ovn\") pod \"ovn-controller-x4xfl\" (UID: \"fbcbdef3-6b6e-442e-9a5a-3bc14faf3be4\") " pod="openstack/ovn-controller-x4xfl" Dec 13 03:28:04 crc kubenswrapper[5070]: I1213 03:28:04.180403 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"var-lib\" (UniqueName: \"kubernetes.io/host-path/5f8ec385-4f69-4645-95d2-2d854b3fac57-var-lib\") pod \"ovn-controller-ovs-zmbl5\" (UID: \"5f8ec385-4f69-4645-95d2-2d854b3fac57\") " pod="openstack/ovn-controller-ovs-zmbl5" Dec 13 03:28:04 crc kubenswrapper[5070]: I1213 03:28:04.180409 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/5f8ec385-4f69-4645-95d2-2d854b3fac57-var-log\") pod \"ovn-controller-ovs-zmbl5\" (UID: \"5f8ec385-4f69-4645-95d2-2d854b3fac57\") " pod="openstack/ovn-controller-ovs-zmbl5" Dec 13 03:28:04 crc kubenswrapper[5070]: I1213 03:28:04.181027 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/5f8ec385-4f69-4645-95d2-2d854b3fac57-var-run\") pod \"ovn-controller-ovs-zmbl5\" (UID: \"5f8ec385-4f69-4645-95d2-2d854b3fac57\") " pod="openstack/ovn-controller-ovs-zmbl5" Dec 13 03:28:04 crc kubenswrapper[5070]: I1213 03:28:04.181164 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/fbcbdef3-6b6e-442e-9a5a-3bc14faf3be4-var-run\") pod \"ovn-controller-x4xfl\" (UID: \"fbcbdef3-6b6e-442e-9a5a-3bc14faf3be4\") " pod="openstack/ovn-controller-x4xfl" Dec 13 03:28:04 crc kubenswrapper[5070]: I1213 03:28:04.181227 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/fbcbdef3-6b6e-442e-9a5a-3bc14faf3be4-var-run-ovn\") pod \"ovn-controller-x4xfl\" (UID: \"fbcbdef3-6b6e-442e-9a5a-3bc14faf3be4\") " pod="openstack/ovn-controller-x4xfl" Dec 13 03:28:04 crc kubenswrapper[5070]: I1213 03:28:04.181302 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/5f8ec385-4f69-4645-95d2-2d854b3fac57-etc-ovs\") pod \"ovn-controller-ovs-zmbl5\" (UID: \"5f8ec385-4f69-4645-95d2-2d854b3fac57\") " pod="openstack/ovn-controller-ovs-zmbl5" Dec 13 03:28:04 crc kubenswrapper[5070]: I1213 03:28:04.182679 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5f8ec385-4f69-4645-95d2-2d854b3fac57-scripts\") pod \"ovn-controller-ovs-zmbl5\" (UID: \"5f8ec385-4f69-4645-95d2-2d854b3fac57\") " pod="openstack/ovn-controller-ovs-zmbl5" Dec 13 03:28:04 crc kubenswrapper[5070]: I1213 03:28:04.183144 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fbcbdef3-6b6e-442e-9a5a-3bc14faf3be4-scripts\") pod \"ovn-controller-x4xfl\" (UID: \"fbcbdef3-6b6e-442e-9a5a-3bc14faf3be4\") " pod="openstack/ovn-controller-x4xfl" Dec 13 03:28:04 crc kubenswrapper[5070]: I1213 03:28:04.185005 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbcbdef3-6b6e-442e-9a5a-3bc14faf3be4-combined-ca-bundle\") pod \"ovn-controller-x4xfl\" (UID: \"fbcbdef3-6b6e-442e-9a5a-3bc14faf3be4\") " pod="openstack/ovn-controller-x4xfl" Dec 13 03:28:04 crc kubenswrapper[5070]: I1213 03:28:04.201030 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-txbxm\" (UniqueName: \"kubernetes.io/projected/fbcbdef3-6b6e-442e-9a5a-3bc14faf3be4-kube-api-access-txbxm\") pod \"ovn-controller-x4xfl\" (UID: \"fbcbdef3-6b6e-442e-9a5a-3bc14faf3be4\") " pod="openstack/ovn-controller-x4xfl" Dec 13 03:28:04 crc kubenswrapper[5070]: I1213 03:28:04.204900 5070 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/fbcbdef3-6b6e-442e-9a5a-3bc14faf3be4-ovn-controller-tls-certs\") pod \"ovn-controller-x4xfl\" (UID: \"fbcbdef3-6b6e-442e-9a5a-3bc14faf3be4\") " pod="openstack/ovn-controller-x4xfl" Dec 13 03:28:04 crc kubenswrapper[5070]: I1213 03:28:04.206070 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-29c56\" (UniqueName: \"kubernetes.io/projected/5f8ec385-4f69-4645-95d2-2d854b3fac57-kube-api-access-29c56\") pod \"ovn-controller-ovs-zmbl5\" (UID: \"5f8ec385-4f69-4645-95d2-2d854b3fac57\") " pod="openstack/ovn-controller-ovs-zmbl5" Dec 13 03:28:04 crc kubenswrapper[5070]: I1213 03:28:04.245351 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-x4xfl" Dec 13 03:28:04 crc kubenswrapper[5070]: I1213 03:28:04.272826 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-zmbl5" Dec 13 03:28:06 crc kubenswrapper[5070]: I1213 03:28:06.462188 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 13 03:28:06 crc kubenswrapper[5070]: I1213 03:28:06.466073 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Dec 13 03:28:06 crc kubenswrapper[5070]: I1213 03:28:06.469317 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-nb-ovndbs" Dec 13 03:28:06 crc kubenswrapper[5070]: I1213 03:28:06.469586 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovn-metrics" Dec 13 03:28:06 crc kubenswrapper[5070]: I1213 03:28:06.469930 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-k2h8p" Dec 13 03:28:06 crc kubenswrapper[5070]: I1213 03:28:06.470107 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts" Dec 13 03:28:06 crc kubenswrapper[5070]: I1213 03:28:06.471136 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config" Dec 13 03:28:06 crc kubenswrapper[5070]: I1213 03:28:06.481259 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 13 03:28:06 crc kubenswrapper[5070]: I1213 03:28:06.567780 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/037626c8-04bb-4af4-a5f3-309c3c174f98-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"037626c8-04bb-4af4-a5f3-309c3c174f98\") " pod="openstack/ovsdbserver-nb-0" Dec 13 03:28:06 crc kubenswrapper[5070]: I1213 03:28:06.567833 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/037626c8-04bb-4af4-a5f3-309c3c174f98-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"037626c8-04bb-4af4-a5f3-309c3c174f98\") " pod="openstack/ovsdbserver-nb-0" Dec 13 03:28:06 crc kubenswrapper[5070]: I1213 03:28:06.567988 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/037626c8-04bb-4af4-a5f3-309c3c174f98-config\") pod \"ovsdbserver-nb-0\" (UID: \"037626c8-04bb-4af4-a5f3-309c3c174f98\") " pod="openstack/ovsdbserver-nb-0" Dec 13 03:28:06 crc 
kubenswrapper[5070]: I1213 03:28:06.568042 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6vb9k\" (UniqueName: \"kubernetes.io/projected/037626c8-04bb-4af4-a5f3-309c3c174f98-kube-api-access-6vb9k\") pod \"ovsdbserver-nb-0\" (UID: \"037626c8-04bb-4af4-a5f3-309c3c174f98\") " pod="openstack/ovsdbserver-nb-0" Dec 13 03:28:06 crc kubenswrapper[5070]: I1213 03:28:06.568124 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/037626c8-04bb-4af4-a5f3-309c3c174f98-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"037626c8-04bb-4af4-a5f3-309c3c174f98\") " pod="openstack/ovsdbserver-nb-0" Dec 13 03:28:06 crc kubenswrapper[5070]: I1213 03:28:06.568185 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/037626c8-04bb-4af4-a5f3-309c3c174f98-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"037626c8-04bb-4af4-a5f3-309c3c174f98\") " pod="openstack/ovsdbserver-nb-0" Dec 13 03:28:06 crc kubenswrapper[5070]: I1213 03:28:06.568244 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"ovsdbserver-nb-0\" (UID: \"037626c8-04bb-4af4-a5f3-309c3c174f98\") " pod="openstack/ovsdbserver-nb-0" Dec 13 03:28:06 crc kubenswrapper[5070]: I1213 03:28:06.568400 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/037626c8-04bb-4af4-a5f3-309c3c174f98-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"037626c8-04bb-4af4-a5f3-309c3c174f98\") " pod="openstack/ovsdbserver-nb-0" Dec 13 03:28:06 crc kubenswrapper[5070]: I1213 03:28:06.669901 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/037626c8-04bb-4af4-a5f3-309c3c174f98-config\") pod \"ovsdbserver-nb-0\" (UID: \"037626c8-04bb-4af4-a5f3-309c3c174f98\") " pod="openstack/ovsdbserver-nb-0" Dec 13 03:28:06 crc kubenswrapper[5070]: I1213 03:28:06.669949 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6vb9k\" (UniqueName: \"kubernetes.io/projected/037626c8-04bb-4af4-a5f3-309c3c174f98-kube-api-access-6vb9k\") pod \"ovsdbserver-nb-0\" (UID: \"037626c8-04bb-4af4-a5f3-309c3c174f98\") " pod="openstack/ovsdbserver-nb-0" Dec 13 03:28:06 crc kubenswrapper[5070]: I1213 03:28:06.669983 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/037626c8-04bb-4af4-a5f3-309c3c174f98-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"037626c8-04bb-4af4-a5f3-309c3c174f98\") " pod="openstack/ovsdbserver-nb-0" Dec 13 03:28:06 crc kubenswrapper[5070]: I1213 03:28:06.670019 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/037626c8-04bb-4af4-a5f3-309c3c174f98-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"037626c8-04bb-4af4-a5f3-309c3c174f98\") " pod="openstack/ovsdbserver-nb-0" Dec 13 03:28:06 crc kubenswrapper[5070]: I1213 03:28:06.670048 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage05-crc\") pod \"ovsdbserver-nb-0\" (UID: \"037626c8-04bb-4af4-a5f3-309c3c174f98\") " pod="openstack/ovsdbserver-nb-0" Dec 13 03:28:06 crc kubenswrapper[5070]: I1213 03:28:06.670070 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/037626c8-04bb-4af4-a5f3-309c3c174f98-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"037626c8-04bb-4af4-a5f3-309c3c174f98\") " pod="openstack/ovsdbserver-nb-0" Dec 13 03:28:06 crc kubenswrapper[5070]: I1213 03:28:06.670100 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/037626c8-04bb-4af4-a5f3-309c3c174f98-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"037626c8-04bb-4af4-a5f3-309c3c174f98\") " pod="openstack/ovsdbserver-nb-0" Dec 13 03:28:06 crc kubenswrapper[5070]: I1213 03:28:06.670130 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/037626c8-04bb-4af4-a5f3-309c3c174f98-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"037626c8-04bb-4af4-a5f3-309c3c174f98\") " pod="openstack/ovsdbserver-nb-0" Dec 13 03:28:06 crc kubenswrapper[5070]: I1213 03:28:06.670679 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/037626c8-04bb-4af4-a5f3-309c3c174f98-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"037626c8-04bb-4af4-a5f3-309c3c174f98\") " pod="openstack/ovsdbserver-nb-0" Dec 13 03:28:06 crc kubenswrapper[5070]: I1213 03:28:06.670932 5070 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"ovsdbserver-nb-0\" (UID: \"037626c8-04bb-4af4-a5f3-309c3c174f98\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/ovsdbserver-nb-0" Dec 13 03:28:06 crc kubenswrapper[5070]: I1213 03:28:06.671776 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/037626c8-04bb-4af4-a5f3-309c3c174f98-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"037626c8-04bb-4af4-a5f3-309c3c174f98\") " pod="openstack/ovsdbserver-nb-0" Dec 13 03:28:06 crc kubenswrapper[5070]: I1213 03:28:06.671810 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/037626c8-04bb-4af4-a5f3-309c3c174f98-config\") pod \"ovsdbserver-nb-0\" (UID: \"037626c8-04bb-4af4-a5f3-309c3c174f98\") " pod="openstack/ovsdbserver-nb-0" Dec 13 03:28:06 crc kubenswrapper[5070]: I1213 03:28:06.677181 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/037626c8-04bb-4af4-a5f3-309c3c174f98-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"037626c8-04bb-4af4-a5f3-309c3c174f98\") " pod="openstack/ovsdbserver-nb-0" Dec 13 03:28:06 crc kubenswrapper[5070]: I1213 03:28:06.679358 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/037626c8-04bb-4af4-a5f3-309c3c174f98-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"037626c8-04bb-4af4-a5f3-309c3c174f98\") " pod="openstack/ovsdbserver-nb-0" Dec 13 03:28:06 crc kubenswrapper[5070]: I1213 03:28:06.689340 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/037626c8-04bb-4af4-a5f3-309c3c174f98-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"037626c8-04bb-4af4-a5f3-309c3c174f98\") " pod="openstack/ovsdbserver-nb-0" Dec 13 03:28:06 crc kubenswrapper[5070]: I1213 03:28:06.694771 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6vb9k\" (UniqueName: \"kubernetes.io/projected/037626c8-04bb-4af4-a5f3-309c3c174f98-kube-api-access-6vb9k\") pod \"ovsdbserver-nb-0\" (UID: \"037626c8-04bb-4af4-a5f3-309c3c174f98\") " pod="openstack/ovsdbserver-nb-0" Dec 13 03:28:06 crc kubenswrapper[5070]: I1213 03:28:06.698214 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"ovsdbserver-nb-0\" (UID: \"037626c8-04bb-4af4-a5f3-309c3c174f98\") " pod="openstack/ovsdbserver-nb-0" Dec 13 03:28:06 crc kubenswrapper[5070]: I1213 03:28:06.786826 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Dec 13 03:28:08 crc kubenswrapper[5070]: I1213 03:28:08.249015 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 13 03:28:08 crc kubenswrapper[5070]: I1213 03:28:08.250538 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Dec 13 03:28:08 crc kubenswrapper[5070]: I1213 03:28:08.252677 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config" Dec 13 03:28:08 crc kubenswrapper[5070]: I1213 03:28:08.253208 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts" Dec 13 03:28:08 crc kubenswrapper[5070]: I1213 03:28:08.255503 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-5p78b" Dec 13 03:28:08 crc kubenswrapper[5070]: I1213 03:28:08.255600 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-sb-ovndbs" Dec 13 03:28:08 crc kubenswrapper[5070]: I1213 03:28:08.263584 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 13 03:28:08 crc kubenswrapper[5070]: I1213 03:28:08.397718 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/077843d3-f8ee-476e-b18c-da48a3a2f200-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"077843d3-f8ee-476e-b18c-da48a3a2f200\") " pod="openstack/ovsdbserver-sb-0" Dec 13 03:28:08 crc kubenswrapper[5070]: I1213 03:28:08.397807 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/077843d3-f8ee-476e-b18c-da48a3a2f200-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"077843d3-f8ee-476e-b18c-da48a3a2f200\") " pod="openstack/ovsdbserver-sb-0" Dec 13 03:28:08 crc kubenswrapper[5070]: I1213 03:28:08.397898 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"ovsdbserver-sb-0\" (UID: \"077843d3-f8ee-476e-b18c-da48a3a2f200\") " pod="openstack/ovsdbserver-sb-0" Dec 13 03:28:08 crc kubenswrapper[5070]: I1213 03:28:08.397941 5070 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zrmjb\" (UniqueName: \"kubernetes.io/projected/077843d3-f8ee-476e-b18c-da48a3a2f200-kube-api-access-zrmjb\") pod \"ovsdbserver-sb-0\" (UID: \"077843d3-f8ee-476e-b18c-da48a3a2f200\") " pod="openstack/ovsdbserver-sb-0" Dec 13 03:28:08 crc kubenswrapper[5070]: I1213 03:28:08.397981 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/077843d3-f8ee-476e-b18c-da48a3a2f200-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"077843d3-f8ee-476e-b18c-da48a3a2f200\") " pod="openstack/ovsdbserver-sb-0" Dec 13 03:28:08 crc kubenswrapper[5070]: I1213 03:28:08.398007 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/077843d3-f8ee-476e-b18c-da48a3a2f200-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"077843d3-f8ee-476e-b18c-da48a3a2f200\") " pod="openstack/ovsdbserver-sb-0" Dec 13 03:28:08 crc kubenswrapper[5070]: I1213 03:28:08.398147 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/077843d3-f8ee-476e-b18c-da48a3a2f200-config\") pod \"ovsdbserver-sb-0\" (UID: \"077843d3-f8ee-476e-b18c-da48a3a2f200\") " pod="openstack/ovsdbserver-sb-0" Dec 13 03:28:08 crc kubenswrapper[5070]: I1213 03:28:08.398179 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/077843d3-f8ee-476e-b18c-da48a3a2f200-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"077843d3-f8ee-476e-b18c-da48a3a2f200\") " pod="openstack/ovsdbserver-sb-0" Dec 13 03:28:08 crc kubenswrapper[5070]: I1213 03:28:08.500060 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/077843d3-f8ee-476e-b18c-da48a3a2f200-config\") pod \"ovsdbserver-sb-0\" (UID: \"077843d3-f8ee-476e-b18c-da48a3a2f200\") " pod="openstack/ovsdbserver-sb-0" Dec 13 03:28:08 crc kubenswrapper[5070]: I1213 03:28:08.500128 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/077843d3-f8ee-476e-b18c-da48a3a2f200-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"077843d3-f8ee-476e-b18c-da48a3a2f200\") " pod="openstack/ovsdbserver-sb-0" Dec 13 03:28:08 crc kubenswrapper[5070]: I1213 03:28:08.500187 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/077843d3-f8ee-476e-b18c-da48a3a2f200-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"077843d3-f8ee-476e-b18c-da48a3a2f200\") " pod="openstack/ovsdbserver-sb-0" Dec 13 03:28:08 crc kubenswrapper[5070]: I1213 03:28:08.500237 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/077843d3-f8ee-476e-b18c-da48a3a2f200-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"077843d3-f8ee-476e-b18c-da48a3a2f200\") " pod="openstack/ovsdbserver-sb-0" Dec 13 03:28:08 crc kubenswrapper[5070]: I1213 03:28:08.500273 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"ovsdbserver-sb-0\" (UID: 
\"077843d3-f8ee-476e-b18c-da48a3a2f200\") " pod="openstack/ovsdbserver-sb-0" Dec 13 03:28:08 crc kubenswrapper[5070]: I1213 03:28:08.500303 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zrmjb\" (UniqueName: \"kubernetes.io/projected/077843d3-f8ee-476e-b18c-da48a3a2f200-kube-api-access-zrmjb\") pod \"ovsdbserver-sb-0\" (UID: \"077843d3-f8ee-476e-b18c-da48a3a2f200\") " pod="openstack/ovsdbserver-sb-0" Dec 13 03:28:08 crc kubenswrapper[5070]: I1213 03:28:08.500367 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/077843d3-f8ee-476e-b18c-da48a3a2f200-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"077843d3-f8ee-476e-b18c-da48a3a2f200\") " pod="openstack/ovsdbserver-sb-0" Dec 13 03:28:08 crc kubenswrapper[5070]: I1213 03:28:08.500817 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/077843d3-f8ee-476e-b18c-da48a3a2f200-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"077843d3-f8ee-476e-b18c-da48a3a2f200\") " pod="openstack/ovsdbserver-sb-0" Dec 13 03:28:08 crc kubenswrapper[5070]: I1213 03:28:08.501073 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/077843d3-f8ee-476e-b18c-da48a3a2f200-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"077843d3-f8ee-476e-b18c-da48a3a2f200\") " pod="openstack/ovsdbserver-sb-0" Dec 13 03:28:08 crc kubenswrapper[5070]: I1213 03:28:08.501501 5070 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"ovsdbserver-sb-0\" (UID: \"077843d3-f8ee-476e-b18c-da48a3a2f200\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/ovsdbserver-sb-0" Dec 13 03:28:08 crc kubenswrapper[5070]: I1213 03:28:08.501545 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/077843d3-f8ee-476e-b18c-da48a3a2f200-config\") pod \"ovsdbserver-sb-0\" (UID: \"077843d3-f8ee-476e-b18c-da48a3a2f200\") " pod="openstack/ovsdbserver-sb-0" Dec 13 03:28:08 crc kubenswrapper[5070]: I1213 03:28:08.502565 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/077843d3-f8ee-476e-b18c-da48a3a2f200-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"077843d3-f8ee-476e-b18c-da48a3a2f200\") " pod="openstack/ovsdbserver-sb-0" Dec 13 03:28:08 crc kubenswrapper[5070]: I1213 03:28:08.506620 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/077843d3-f8ee-476e-b18c-da48a3a2f200-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"077843d3-f8ee-476e-b18c-da48a3a2f200\") " pod="openstack/ovsdbserver-sb-0" Dec 13 03:28:08 crc kubenswrapper[5070]: I1213 03:28:08.515275 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/077843d3-f8ee-476e-b18c-da48a3a2f200-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"077843d3-f8ee-476e-b18c-da48a3a2f200\") " pod="openstack/ovsdbserver-sb-0" Dec 13 03:28:08 crc kubenswrapper[5070]: I1213 03:28:08.516144 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/077843d3-f8ee-476e-b18c-da48a3a2f200-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"077843d3-f8ee-476e-b18c-da48a3a2f200\") " pod="openstack/ovsdbserver-sb-0" Dec 13 03:28:08 crc kubenswrapper[5070]: I1213 03:28:08.519007 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zrmjb\" (UniqueName: \"kubernetes.io/projected/077843d3-f8ee-476e-b18c-da48a3a2f200-kube-api-access-zrmjb\") pod \"ovsdbserver-sb-0\" (UID: \"077843d3-f8ee-476e-b18c-da48a3a2f200\") " pod="openstack/ovsdbserver-sb-0" Dec 13 03:28:08 crc kubenswrapper[5070]: I1213 03:28:08.528930 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"ovsdbserver-sb-0\" (UID: \"077843d3-f8ee-476e-b18c-da48a3a2f200\") " pod="openstack/ovsdbserver-sb-0" Dec 13 03:28:08 crc kubenswrapper[5070]: I1213 03:28:08.581144 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Dec 13 03:28:13 crc kubenswrapper[5070]: E1213 03:28:13.760143 5070 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Dec 13 03:28:13 crc kubenswrapper[5070]: E1213 03:28:13.760380 5070 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nffh5bdhf4h5f8h79h55h77h58fh56dh7bh6fh578hbch55dh68h56bhd9h65dh57ch658hc9h566h666h688h58h65dh684h5d7h6ch575h5d6h88q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-xxqmk,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-675f4bcbfc-n4tjw_openstack(010ab03f-0c20-4b24-8137-284558e10da5): ErrImagePull: rpc error: code = Canceled desc = 
copying config: context canceled" logger="UnhandledError" Dec 13 03:28:13 crc kubenswrapper[5070]: E1213 03:28:13.761587 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-675f4bcbfc-n4tjw" podUID="010ab03f-0c20-4b24-8137-284558e10da5" Dec 13 03:28:13 crc kubenswrapper[5070]: E1213 03:28:13.764759 5070 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Dec 13 03:28:13 crc kubenswrapper[5070]: E1213 03:28:13.764874 5070 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:ndfhb5h667h568h584h5f9h58dh565h664h587h597h577h64bh5c4h66fh647hbdh68ch5c5h68dh686h5f7h64hd7hc6h55fh57bh98h57fh87h5fh57fq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-lb6md,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-78dd6ddcc-v9hvp_openstack(dee9f29c-350c-4461-ba3c-a8ed90b2ce7b): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 13 03:28:13 crc kubenswrapper[5070]: E1213 03:28:13.766079 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-78dd6ddcc-v9hvp" podUID="dee9f29c-350c-4461-ba3c-a8ed90b2ce7b" Dec 13 03:28:13 crc kubenswrapper[5070]: E1213 03:28:13.775945 5070 log.go:32] "PullImage from image service 
failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Dec 13 03:28:13 crc kubenswrapper[5070]: E1213 03:28:13.776590 5070 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n68chd6h679hbfh55fhc6h5ffh5d8h94h56ch589hb4hc5h57bh677hcdh655h8dh667h675h654h66ch567h8fh659h5b4h675h566h55bh54h67dh6dq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-785dj,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-666b6646f7-7z824_openstack(3e429df3-9f7f-40e4-8d73-d3c492104561): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 13 03:28:13 crc kubenswrapper[5070]: E1213 03:28:13.778118 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-666b6646f7-7z824" podUID="3e429df3-9f7f-40e4-8d73-d3c492104561" Dec 13 03:28:14 crc kubenswrapper[5070]: E1213 03:28:14.534277 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified\\\"\"" pod="openstack/dnsmasq-dns-666b6646f7-7z824" podUID="3e429df3-9f7f-40e4-8d73-d3c492104561" Dec 13 03:28:17 crc kubenswrapper[5070]: E1213 03:28:17.736622 5070 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" 
image="quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified" Dec 13 03:28:17 crc kubenswrapper[5070]: E1213 03:28:17.737115 5070 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:setup-container,Image:quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified,Command:[sh -c cp /tmp/erlang-cookie-secret/.erlang.cookie /var/lib/rabbitmq/.erlang.cookie && chmod 600 /var/lib/rabbitmq/.erlang.cookie ; cp /tmp/rabbitmq-plugins/enabled_plugins /operator/enabled_plugins ; echo '[default]' > /var/lib/rabbitmq/.rabbitmqadmin.conf && sed -e 's/default_user/username/' -e 's/default_pass/password/' /tmp/default_user.conf >> /var/lib/rabbitmq/.rabbitmqadmin.conf && chmod 600 /var/lib/rabbitmq/.rabbitmqadmin.conf ; sleep 30],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Requests:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:plugins-conf,ReadOnly:false,MountPath:/tmp/rabbitmq-plugins/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-erlang-cookie,ReadOnly:false,MountPath:/var/lib/rabbitmq/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:erlang-cookie-secret,ReadOnly:false,MountPath:/tmp/erlang-cookie-secret/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-plugins,ReadOnly:false,MountPath:/operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:persistence,ReadOnly:false,MountPath:/var/lib/rabbitmq/mnesia/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-confd,ReadOnly:false,MountPath:/tmp/default_user.conf,SubPath:default_user.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-wlsq6,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-server-0_openstack(9dd13bd9-bfbd-4f80-b334-d8b959a6187d): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 13 03:28:17 crc kubenswrapper[5070]: E1213 03:28:17.738604 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/rabbitmq-server-0" podUID="9dd13bd9-bfbd-4f80-b334-d8b959a6187d" Dec 13 03:28:17 crc kubenswrapper[5070]: E1213 03:28:17.759986 5070 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" 
image="quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified" Dec 13 03:28:17 crc kubenswrapper[5070]: E1213 03:28:17.760272 5070 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:setup-container,Image:quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified,Command:[sh -c cp /tmp/erlang-cookie-secret/.erlang.cookie /var/lib/rabbitmq/.erlang.cookie && chmod 600 /var/lib/rabbitmq/.erlang.cookie ; cp /tmp/rabbitmq-plugins/enabled_plugins /operator/enabled_plugins ; echo '[default]' > /var/lib/rabbitmq/.rabbitmqadmin.conf && sed -e 's/default_user/username/' -e 's/default_pass/password/' /tmp/default_user.conf >> /var/lib/rabbitmq/.rabbitmqadmin.conf && chmod 600 /var/lib/rabbitmq/.rabbitmqadmin.conf ; sleep 30],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Requests:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:plugins-conf,ReadOnly:false,MountPath:/tmp/rabbitmq-plugins/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-erlang-cookie,ReadOnly:false,MountPath:/var/lib/rabbitmq/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:erlang-cookie-secret,ReadOnly:false,MountPath:/tmp/erlang-cookie-secret/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-plugins,ReadOnly:false,MountPath:/operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:persistence,ReadOnly:false,MountPath:/var/lib/rabbitmq/mnesia/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-confd,ReadOnly:false,MountPath:/tmp/default_user.conf,SubPath:default_user.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-mgtbx,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cell1-server-0_openstack(d669856e-7406-451a-825e-9de1fc76f8b2): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 13 03:28:17 crc kubenswrapper[5070]: E1213 03:28:17.762374 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/rabbitmq-cell1-server-0" podUID="d669856e-7406-451a-825e-9de1fc76f8b2" Dec 13 03:28:17 crc kubenswrapper[5070]: I1213 03:28:17.840008 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-n4tjw" Dec 13 03:28:17 crc kubenswrapper[5070]: I1213 03:28:17.845700 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-v9hvp" Dec 13 03:28:17 crc kubenswrapper[5070]: I1213 03:28:17.976016 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/dee9f29c-350c-4461-ba3c-a8ed90b2ce7b-dns-svc\") pod \"dee9f29c-350c-4461-ba3c-a8ed90b2ce7b\" (UID: \"dee9f29c-350c-4461-ba3c-a8ed90b2ce7b\") " Dec 13 03:28:17 crc kubenswrapper[5070]: I1213 03:28:17.976077 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xxqmk\" (UniqueName: \"kubernetes.io/projected/010ab03f-0c20-4b24-8137-284558e10da5-kube-api-access-xxqmk\") pod \"010ab03f-0c20-4b24-8137-284558e10da5\" (UID: \"010ab03f-0c20-4b24-8137-284558e10da5\") " Dec 13 03:28:17 crc kubenswrapper[5070]: I1213 03:28:17.976120 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dee9f29c-350c-4461-ba3c-a8ed90b2ce7b-config\") pod \"dee9f29c-350c-4461-ba3c-a8ed90b2ce7b\" (UID: \"dee9f29c-350c-4461-ba3c-a8ed90b2ce7b\") " Dec 13 03:28:17 crc kubenswrapper[5070]: I1213 03:28:17.976266 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lb6md\" (UniqueName: \"kubernetes.io/projected/dee9f29c-350c-4461-ba3c-a8ed90b2ce7b-kube-api-access-lb6md\") pod \"dee9f29c-350c-4461-ba3c-a8ed90b2ce7b\" (UID: \"dee9f29c-350c-4461-ba3c-a8ed90b2ce7b\") " Dec 13 03:28:17 crc kubenswrapper[5070]: I1213 03:28:17.976316 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/010ab03f-0c20-4b24-8137-284558e10da5-config\") pod \"010ab03f-0c20-4b24-8137-284558e10da5\" (UID: \"010ab03f-0c20-4b24-8137-284558e10da5\") " Dec 13 03:28:17 crc kubenswrapper[5070]: I1213 03:28:17.976692 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dee9f29c-350c-4461-ba3c-a8ed90b2ce7b-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "dee9f29c-350c-4461-ba3c-a8ed90b2ce7b" (UID: "dee9f29c-350c-4461-ba3c-a8ed90b2ce7b"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:28:17 crc kubenswrapper[5070]: I1213 03:28:17.977170 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/010ab03f-0c20-4b24-8137-284558e10da5-config" (OuterVolumeSpecName: "config") pod "010ab03f-0c20-4b24-8137-284558e10da5" (UID: "010ab03f-0c20-4b24-8137-284558e10da5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:28:17 crc kubenswrapper[5070]: I1213 03:28:17.977213 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dee9f29c-350c-4461-ba3c-a8ed90b2ce7b-config" (OuterVolumeSpecName: "config") pod "dee9f29c-350c-4461-ba3c-a8ed90b2ce7b" (UID: "dee9f29c-350c-4461-ba3c-a8ed90b2ce7b"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:28:17 crc kubenswrapper[5070]: I1213 03:28:17.983309 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dee9f29c-350c-4461-ba3c-a8ed90b2ce7b-kube-api-access-lb6md" (OuterVolumeSpecName: "kube-api-access-lb6md") pod "dee9f29c-350c-4461-ba3c-a8ed90b2ce7b" (UID: "dee9f29c-350c-4461-ba3c-a8ed90b2ce7b"). InnerVolumeSpecName "kube-api-access-lb6md". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:28:17 crc kubenswrapper[5070]: I1213 03:28:17.996486 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/010ab03f-0c20-4b24-8137-284558e10da5-kube-api-access-xxqmk" (OuterVolumeSpecName: "kube-api-access-xxqmk") pod "010ab03f-0c20-4b24-8137-284558e10da5" (UID: "010ab03f-0c20-4b24-8137-284558e10da5"). InnerVolumeSpecName "kube-api-access-xxqmk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:28:18 crc kubenswrapper[5070]: I1213 03:28:18.078389 5070 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/dee9f29c-350c-4461-ba3c-a8ed90b2ce7b-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 13 03:28:18 crc kubenswrapper[5070]: I1213 03:28:18.078425 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xxqmk\" (UniqueName: \"kubernetes.io/projected/010ab03f-0c20-4b24-8137-284558e10da5-kube-api-access-xxqmk\") on node \"crc\" DevicePath \"\"" Dec 13 03:28:18 crc kubenswrapper[5070]: I1213 03:28:18.078466 5070 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dee9f29c-350c-4461-ba3c-a8ed90b2ce7b-config\") on node \"crc\" DevicePath \"\"" Dec 13 03:28:18 crc kubenswrapper[5070]: I1213 03:28:18.078479 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lb6md\" (UniqueName: \"kubernetes.io/projected/dee9f29c-350c-4461-ba3c-a8ed90b2ce7b-kube-api-access-lb6md\") on node \"crc\" DevicePath \"\"" Dec 13 03:28:18 crc kubenswrapper[5070]: I1213 03:28:18.078490 5070 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/010ab03f-0c20-4b24-8137-284558e10da5-config\") on node \"crc\" DevicePath \"\"" Dec 13 03:28:18 crc kubenswrapper[5070]: I1213 03:28:18.562680 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-v9hvp" event={"ID":"dee9f29c-350c-4461-ba3c-a8ed90b2ce7b","Type":"ContainerDied","Data":"456da7bed6b0284b4208ded4245a0ba971f093bebe4865e67ab9fa262707472e"} Dec 13 03:28:18 crc kubenswrapper[5070]: I1213 03:28:18.562749 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-v9hvp" Dec 13 03:28:18 crc kubenswrapper[5070]: I1213 03:28:18.565429 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-n4tjw" event={"ID":"010ab03f-0c20-4b24-8137-284558e10da5","Type":"ContainerDied","Data":"f762bad237f0bf4c36f7100a287be14152cfd8b022644123fae169b218d91d0a"} Dec 13 03:28:18 crc kubenswrapper[5070]: I1213 03:28:18.565464 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-n4tjw" Dec 13 03:28:18 crc kubenswrapper[5070]: E1213 03:28:18.568317 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified\\\"\"" pod="openstack/rabbitmq-server-0" podUID="9dd13bd9-bfbd-4f80-b334-d8b959a6187d" Dec 13 03:28:18 crc kubenswrapper[5070]: E1213 03:28:18.570402 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified\\\"\"" pod="openstack/rabbitmq-cell1-server-0" podUID="d669856e-7406-451a-825e-9de1fc76f8b2" Dec 13 03:28:18 crc kubenswrapper[5070]: I1213 03:28:18.651148 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-n4tjw"] Dec 13 03:28:18 crc kubenswrapper[5070]: I1213 03:28:18.668140 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-n4tjw"] Dec 13 03:28:18 crc kubenswrapper[5070]: I1213 03:28:18.680681 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-v9hvp"] Dec 13 03:28:18 crc kubenswrapper[5070]: I1213 03:28:18.686899 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-v9hvp"] Dec 13 03:28:20 crc kubenswrapper[5070]: I1213 03:28:20.178321 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="010ab03f-0c20-4b24-8137-284558e10da5" path="/var/lib/kubelet/pods/010ab03f-0c20-4b24-8137-284558e10da5/volumes" Dec 13 03:28:20 crc kubenswrapper[5070]: I1213 03:28:20.179834 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dee9f29c-350c-4461-ba3c-a8ed90b2ce7b" path="/var/lib/kubelet/pods/dee9f29c-350c-4461-ba3c-a8ed90b2ce7b/volumes" Dec 13 03:28:20 crc kubenswrapper[5070]: W1213 03:28:20.398794 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9dd33a5a_3305_45ff_a544_0bae02032d8f.slice/crio-39f7ec462d58aafa48abf18e7e532b85d23e0d5bbe05fcb09f1141789a01e75f WatchSource:0}: Error finding container 39f7ec462d58aafa48abf18e7e532b85d23e0d5bbe05fcb09f1141789a01e75f: Status 404 returned error can't find the container with id 39f7ec462d58aafa48abf18e7e532b85d23e0d5bbe05fcb09f1141789a01e75f Dec 13 03:28:20 crc kubenswrapper[5070]: I1213 03:28:20.399749 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 13 03:28:20 crc kubenswrapper[5070]: I1213 03:28:20.448936 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Dec 13 03:28:20 crc kubenswrapper[5070]: W1213 03:28:20.455058 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod691f47eb_c110_4423_8dac_5515ac7306e0.slice/crio-904997c53cc5860d76652e6d733fbbbc420ab9513566cdbf4527da844ece466b WatchSource:0}: Error finding container 904997c53cc5860d76652e6d733fbbbc420ab9513566cdbf4527da844ece466b: Status 404 returned error can't find the container with id 904997c53cc5860d76652e6d733fbbbc420ab9513566cdbf4527da844ece466b Dec 13 03:28:20 crc kubenswrapper[5070]: W1213 03:28:20.455705 5070 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc8cecacd_8f32_41ea_a0bb_1b1cab38ad2b.slice/crio-be20d13044737784025bb23ca2e118a648b4d720f7e015ced677bc3def0ed5cb WatchSource:0}: Error finding container be20d13044737784025bb23ca2e118a648b4d720f7e015ced677bc3def0ed5cb: Status 404 returned error can't find the container with id be20d13044737784025bb23ca2e118a648b4d720f7e015ced677bc3def0ed5cb Dec 13 03:28:20 crc kubenswrapper[5070]: I1213 03:28:20.455932 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 13 03:28:20 crc kubenswrapper[5070]: I1213 03:28:20.584697 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"c8cecacd-8f32-41ea-a0bb-1b1cab38ad2b","Type":"ContainerStarted","Data":"be20d13044737784025bb23ca2e118a648b4d720f7e015ced677bc3def0ed5cb"} Dec 13 03:28:20 crc kubenswrapper[5070]: I1213 03:28:20.596872 5070 generic.go:334] "Generic (PLEG): container finished" podID="847376ab-5cb3-46ed-8b13-f0a21f09c135" containerID="bf35365a956f25d5071e0c7f88c93693b82c466dc535610ec6a0349ea382dbe5" exitCode=0 Dec 13 03:28:20 crc kubenswrapper[5070]: I1213 03:28:20.596975 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-9w6wn" event={"ID":"847376ab-5cb3-46ed-8b13-f0a21f09c135","Type":"ContainerDied","Data":"bf35365a956f25d5071e0c7f88c93693b82c466dc535610ec6a0349ea382dbe5"} Dec 13 03:28:20 crc kubenswrapper[5070]: I1213 03:28:20.599430 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"691f47eb-c110-4423-8dac-5515ac7306e0","Type":"ContainerStarted","Data":"904997c53cc5860d76652e6d733fbbbc420ab9513566cdbf4527da844ece466b"} Dec 13 03:28:20 crc kubenswrapper[5070]: I1213 03:28:20.601568 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"9dd33a5a-3305-45ff-a544-0bae02032d8f","Type":"ContainerStarted","Data":"806e1156eb53de4563184442eb7525c48fb44a79f1ff9f9fbbaf40aea4eec858"} Dec 13 03:28:20 crc kubenswrapper[5070]: I1213 03:28:20.601597 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"9dd33a5a-3305-45ff-a544-0bae02032d8f","Type":"ContainerStarted","Data":"39f7ec462d58aafa48abf18e7e532b85d23e0d5bbe05fcb09f1141789a01e75f"} Dec 13 03:28:20 crc kubenswrapper[5070]: I1213 03:28:20.606744 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"df672553-0b7d-4554-9ff6-c5d9a674dffd","Type":"ContainerStarted","Data":"aa5c739d646acf2243774f894a5a215c7434b7e5c29425baf4be18587ec0c71f"} Dec 13 03:28:20 crc kubenswrapper[5070]: I1213 03:28:20.710481 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-x4xfl"] Dec 13 03:28:20 crc kubenswrapper[5070]: W1213 03:28:20.734198 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfbcbdef3_6b6e_442e_9a5a_3bc14faf3be4.slice/crio-41175b3370ac17adcb65045191a1b3ab2e2e08553a601692ec1d8e68f8a87ee5 WatchSource:0}: Error finding container 41175b3370ac17adcb65045191a1b3ab2e2e08553a601692ec1d8e68f8a87ee5: Status 404 returned error can't find the container with id 41175b3370ac17adcb65045191a1b3ab2e2e08553a601692ec1d8e68f8a87ee5 Dec 13 03:28:20 crc kubenswrapper[5070]: W1213 03:28:20.734599 5070 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod077843d3_f8ee_476e_b18c_da48a3a2f200.slice/crio-be02855067735f286f0c9b86abf9209a81a307ce5fd318bc9a549b24195cc867 WatchSource:0}: Error finding container be02855067735f286f0c9b86abf9209a81a307ce5fd318bc9a549b24195cc867: Status 404 returned error can't find the container with id be02855067735f286f0c9b86abf9209a81a307ce5fd318bc9a549b24195cc867 Dec 13 03:28:20 crc kubenswrapper[5070]: I1213 03:28:20.745173 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 13 03:28:20 crc kubenswrapper[5070]: I1213 03:28:20.792875 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-zmbl5"] Dec 13 03:28:20 crc kubenswrapper[5070]: W1213 03:28:20.800413 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5f8ec385_4f69_4645_95d2_2d854b3fac57.slice/crio-ab92b569b08cb7a394a44769e73749921cdbaad135879a37fbe06ef20be88aeb WatchSource:0}: Error finding container ab92b569b08cb7a394a44769e73749921cdbaad135879a37fbe06ef20be88aeb: Status 404 returned error can't find the container with id ab92b569b08cb7a394a44769e73749921cdbaad135879a37fbe06ef20be88aeb Dec 13 03:28:21 crc kubenswrapper[5070]: I1213 03:28:21.555387 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 13 03:28:21 crc kubenswrapper[5070]: I1213 03:28:21.616155 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-zmbl5" event={"ID":"5f8ec385-4f69-4645-95d2-2d854b3fac57","Type":"ContainerStarted","Data":"ab92b569b08cb7a394a44769e73749921cdbaad135879a37fbe06ef20be88aeb"} Dec 13 03:28:21 crc kubenswrapper[5070]: I1213 03:28:21.619289 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-9w6wn" event={"ID":"847376ab-5cb3-46ed-8b13-f0a21f09c135","Type":"ContainerStarted","Data":"81a44503ba3a048f1181e7351e805361a0c483253fbeaf87779edb72390d1019"} Dec 13 03:28:21 crc kubenswrapper[5070]: I1213 03:28:21.619709 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-57d769cc4f-9w6wn" Dec 13 03:28:21 crc kubenswrapper[5070]: I1213 03:28:21.621531 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-x4xfl" event={"ID":"fbcbdef3-6b6e-442e-9a5a-3bc14faf3be4","Type":"ContainerStarted","Data":"41175b3370ac17adcb65045191a1b3ab2e2e08553a601692ec1d8e68f8a87ee5"} Dec 13 03:28:21 crc kubenswrapper[5070]: I1213 03:28:21.622378 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"077843d3-f8ee-476e-b18c-da48a3a2f200","Type":"ContainerStarted","Data":"be02855067735f286f0c9b86abf9209a81a307ce5fd318bc9a549b24195cc867"} Dec 13 03:28:21 crc kubenswrapper[5070]: I1213 03:28:21.643565 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-57d769cc4f-9w6wn" podStartSLOduration=4.115552521 podStartE2EDuration="27.643545725s" podCreationTimestamp="2025-12-13 03:27:54 +0000 UTC" firstStartedPulling="2025-12-13 03:27:56.23672039 +0000 UTC m=+968.472563936" lastFinishedPulling="2025-12-13 03:28:19.764713604 +0000 UTC m=+992.000557140" observedRunningTime="2025-12-13 03:28:21.635953777 +0000 UTC m=+993.871797323" watchObservedRunningTime="2025-12-13 03:28:21.643545725 +0000 UTC m=+993.879389271" Dec 13 03:28:21 crc kubenswrapper[5070]: W1213 03:28:21.699924 5070 manager.go:1169] Failed to 
process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod037626c8_04bb_4af4_a5f3_309c3c174f98.slice/crio-c52731d0bf4ebeaf5f981f3bf36ec40491ccd8988049e5374f1bbad3e9f66849 WatchSource:0}: Error finding container c52731d0bf4ebeaf5f981f3bf36ec40491ccd8988049e5374f1bbad3e9f66849: Status 404 returned error can't find the container with id c52731d0bf4ebeaf5f981f3bf36ec40491ccd8988049e5374f1bbad3e9f66849 Dec 13 03:28:22 crc kubenswrapper[5070]: I1213 03:28:22.630682 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"037626c8-04bb-4af4-a5f3-309c3c174f98","Type":"ContainerStarted","Data":"c52731d0bf4ebeaf5f981f3bf36ec40491ccd8988049e5374f1bbad3e9f66849"} Dec 13 03:28:23 crc kubenswrapper[5070]: I1213 03:28:23.641586 5070 generic.go:334] "Generic (PLEG): container finished" podID="df672553-0b7d-4554-9ff6-c5d9a674dffd" containerID="aa5c739d646acf2243774f894a5a215c7434b7e5c29425baf4be18587ec0c71f" exitCode=0 Dec 13 03:28:23 crc kubenswrapper[5070]: I1213 03:28:23.641615 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"df672553-0b7d-4554-9ff6-c5d9a674dffd","Type":"ContainerDied","Data":"aa5c739d646acf2243774f894a5a215c7434b7e5c29425baf4be18587ec0c71f"} Dec 13 03:28:24 crc kubenswrapper[5070]: I1213 03:28:24.648739 5070 generic.go:334] "Generic (PLEG): container finished" podID="9dd33a5a-3305-45ff-a544-0bae02032d8f" containerID="806e1156eb53de4563184442eb7525c48fb44a79f1ff9f9fbbaf40aea4eec858" exitCode=0 Dec 13 03:28:24 crc kubenswrapper[5070]: I1213 03:28:24.648785 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"9dd33a5a-3305-45ff-a544-0bae02032d8f","Type":"ContainerDied","Data":"806e1156eb53de4563184442eb7525c48fb44a79f1ff9f9fbbaf40aea4eec858"} Dec 13 03:28:25 crc kubenswrapper[5070]: I1213 03:28:25.645330 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-57d769cc4f-9w6wn" Dec 13 03:28:25 crc kubenswrapper[5070]: I1213 03:28:25.672850 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"037626c8-04bb-4af4-a5f3-309c3c174f98","Type":"ContainerStarted","Data":"aa58467579c096f81e2bfd921eb5c45de673d74a574161b8c6b7c56dc6f4383c"} Dec 13 03:28:25 crc kubenswrapper[5070]: I1213 03:28:25.682405 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"c8cecacd-8f32-41ea-a0bb-1b1cab38ad2b","Type":"ContainerStarted","Data":"07237b4d31b51b63bcb69a4f31c8f60ef3ae39809da216e1fdb809b79069f664"} Dec 13 03:28:25 crc kubenswrapper[5070]: I1213 03:28:25.683361 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/memcached-0" Dec 13 03:28:25 crc kubenswrapper[5070]: I1213 03:28:25.693158 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-7z824" event={"ID":"3e429df3-9f7f-40e4-8d73-d3c492104561","Type":"ContainerStarted","Data":"918f92df719c86e4cdd762c8a68c169f531051aafdebc64be697330a064552b7"} Dec 13 03:28:25 crc kubenswrapper[5070]: I1213 03:28:25.704364 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=21.911555961 podStartE2EDuration="26.704347955s" podCreationTimestamp="2025-12-13 03:27:59 +0000 UTC" firstStartedPulling="2025-12-13 03:28:20.458809824 +0000 UTC m=+992.694653370" lastFinishedPulling="2025-12-13 03:28:25.251601828 +0000 UTC 
m=+997.487445364" observedRunningTime="2025-12-13 03:28:25.703728238 +0000 UTC m=+997.939571784" watchObservedRunningTime="2025-12-13 03:28:25.704347955 +0000 UTC m=+997.940191501" Dec 13 03:28:25 crc kubenswrapper[5070]: I1213 03:28:25.729298 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"9dd33a5a-3305-45ff-a544-0bae02032d8f","Type":"ContainerStarted","Data":"141f38c0089303a35fd526827809b4b4ac447c29c01760471dfa24a79c095de7"} Dec 13 03:28:25 crc kubenswrapper[5070]: I1213 03:28:25.740671 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-7z824"] Dec 13 03:28:25 crc kubenswrapper[5070]: I1213 03:28:25.768952 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"df672553-0b7d-4554-9ff6-c5d9a674dffd","Type":"ContainerStarted","Data":"c687386cc42affe38ee3c361137eff3641ec6220803267031b0f5549ddc2b6ce"} Dec 13 03:28:25 crc kubenswrapper[5070]: I1213 03:28:25.779212 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" podStartSLOduration=27.779183238999998 podStartE2EDuration="27.779183239s" podCreationTimestamp="2025-12-13 03:27:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 03:28:25.761371023 +0000 UTC m=+997.997214569" watchObservedRunningTime="2025-12-13 03:28:25.779183239 +0000 UTC m=+998.015026795" Dec 13 03:28:25 crc kubenswrapper[5070]: I1213 03:28:25.807418 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=8.44292777 podStartE2EDuration="29.807394269s" podCreationTimestamp="2025-12-13 03:27:56 +0000 UTC" firstStartedPulling="2025-12-13 03:27:58.438757838 +0000 UTC m=+970.674601394" lastFinishedPulling="2025-12-13 03:28:19.803224337 +0000 UTC m=+992.039067893" observedRunningTime="2025-12-13 03:28:25.797748776 +0000 UTC m=+998.033592322" watchObservedRunningTime="2025-12-13 03:28:25.807394269 +0000 UTC m=+998.043237815" Dec 13 03:28:26 crc kubenswrapper[5070]: I1213 03:28:26.792578 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"077843d3-f8ee-476e-b18c-da48a3a2f200","Type":"ContainerStarted","Data":"c8e64484cf6250ceca6fa5bf4387bdaa2d443b23ad36c9df1a13c95d3260b0b6"} Dec 13 03:28:26 crc kubenswrapper[5070]: I1213 03:28:26.794900 5070 generic.go:334] "Generic (PLEG): container finished" podID="5f8ec385-4f69-4645-95d2-2d854b3fac57" containerID="b94ea72eaa1817f6c03740d50a75c4467a863b40caa75bc70450a70fb0b9f091" exitCode=0 Dec 13 03:28:26 crc kubenswrapper[5070]: I1213 03:28:26.794958 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-zmbl5" event={"ID":"5f8ec385-4f69-4645-95d2-2d854b3fac57","Type":"ContainerDied","Data":"b94ea72eaa1817f6c03740d50a75c4467a863b40caa75bc70450a70fb0b9f091"} Dec 13 03:28:26 crc kubenswrapper[5070]: I1213 03:28:26.796827 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"691f47eb-c110-4423-8dac-5515ac7306e0","Type":"ContainerStarted","Data":"e555220c6bd8b607f70338c3f9def7a53af884e6234757b172f7449e6515b719"} Dec 13 03:28:26 crc kubenswrapper[5070]: I1213 03:28:26.797280 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Dec 13 03:28:26 crc kubenswrapper[5070]: I1213 03:28:26.810343 
5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-x4xfl" event={"ID":"fbcbdef3-6b6e-442e-9a5a-3bc14faf3be4","Type":"ContainerStarted","Data":"4d53459dd08b875f9fdbbdeaf9270acb60c0f1b8234d438c77193066a3f75064"} Dec 13 03:28:26 crc kubenswrapper[5070]: I1213 03:28:26.810487 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-x4xfl" Dec 13 03:28:26 crc kubenswrapper[5070]: I1213 03:28:26.812691 5070 generic.go:334] "Generic (PLEG): container finished" podID="3e429df3-9f7f-40e4-8d73-d3c492104561" containerID="918f92df719c86e4cdd762c8a68c169f531051aafdebc64be697330a064552b7" exitCode=0 Dec 13 03:28:26 crc kubenswrapper[5070]: I1213 03:28:26.812724 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-7z824" event={"ID":"3e429df3-9f7f-40e4-8d73-d3c492104561","Type":"ContainerDied","Data":"918f92df719c86e4cdd762c8a68c169f531051aafdebc64be697330a064552b7"} Dec 13 03:28:26 crc kubenswrapper[5070]: I1213 03:28:26.812759 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-7z824" event={"ID":"3e429df3-9f7f-40e4-8d73-d3c492104561","Type":"ContainerStarted","Data":"03161968d3fe0c2240cf4a2f3f13a331e93aeb737ad26bc8b3ebd7c8f6f5a713"} Dec 13 03:28:26 crc kubenswrapper[5070]: I1213 03:28:26.813065 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-666b6646f7-7z824" podUID="3e429df3-9f7f-40e4-8d73-d3c492104561" containerName="dnsmasq-dns" containerID="cri-o://03161968d3fe0c2240cf4a2f3f13a331e93aeb737ad26bc8b3ebd7c8f6f5a713" gracePeriod=10 Dec 13 03:28:26 crc kubenswrapper[5070]: I1213 03:28:26.833245 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=21.95269867 podStartE2EDuration="26.833228591s" podCreationTimestamp="2025-12-13 03:28:00 +0000 UTC" firstStartedPulling="2025-12-13 03:28:20.456188282 +0000 UTC m=+992.692031828" lastFinishedPulling="2025-12-13 03:28:25.336718203 +0000 UTC m=+997.572561749" observedRunningTime="2025-12-13 03:28:26.831270707 +0000 UTC m=+999.067114283" watchObservedRunningTime="2025-12-13 03:28:26.833228591 +0000 UTC m=+999.069072127" Dec 13 03:28:26 crc kubenswrapper[5070]: I1213 03:28:26.858603 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-x4xfl" podStartSLOduration=19.278171159 podStartE2EDuration="23.858581683s" podCreationTimestamp="2025-12-13 03:28:03 +0000 UTC" firstStartedPulling="2025-12-13 03:28:20.739415978 +0000 UTC m=+992.975259534" lastFinishedPulling="2025-12-13 03:28:25.319826512 +0000 UTC m=+997.555670058" observedRunningTime="2025-12-13 03:28:26.84822112 +0000 UTC m=+999.084064666" watchObservedRunningTime="2025-12-13 03:28:26.858581683 +0000 UTC m=+999.094425229" Dec 13 03:28:26 crc kubenswrapper[5070]: I1213 03:28:26.866694 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-666b6646f7-7z824" podStartSLOduration=-9223372003.988106 podStartE2EDuration="32.866669403s" podCreationTimestamp="2025-12-13 03:27:54 +0000 UTC" firstStartedPulling="2025-12-13 03:27:55.670242476 +0000 UTC m=+967.906086022" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 03:28:26.865427 +0000 UTC m=+999.101270546" watchObservedRunningTime="2025-12-13 03:28:26.866669403 +0000 UTC m=+999.102512949" Dec 13 03:28:27 crc kubenswrapper[5070]: I1213 03:28:27.238276 5070 
util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-7z824" Dec 13 03:28:27 crc kubenswrapper[5070]: I1213 03:28:27.292224 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3e429df3-9f7f-40e4-8d73-d3c492104561-config\") pod \"3e429df3-9f7f-40e4-8d73-d3c492104561\" (UID: \"3e429df3-9f7f-40e4-8d73-d3c492104561\") " Dec 13 03:28:27 crc kubenswrapper[5070]: I1213 03:28:27.292290 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-785dj\" (UniqueName: \"kubernetes.io/projected/3e429df3-9f7f-40e4-8d73-d3c492104561-kube-api-access-785dj\") pod \"3e429df3-9f7f-40e4-8d73-d3c492104561\" (UID: \"3e429df3-9f7f-40e4-8d73-d3c492104561\") " Dec 13 03:28:27 crc kubenswrapper[5070]: I1213 03:28:27.292398 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3e429df3-9f7f-40e4-8d73-d3c492104561-dns-svc\") pod \"3e429df3-9f7f-40e4-8d73-d3c492104561\" (UID: \"3e429df3-9f7f-40e4-8d73-d3c492104561\") " Dec 13 03:28:27 crc kubenswrapper[5070]: I1213 03:28:27.301628 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3e429df3-9f7f-40e4-8d73-d3c492104561-kube-api-access-785dj" (OuterVolumeSpecName: "kube-api-access-785dj") pod "3e429df3-9f7f-40e4-8d73-d3c492104561" (UID: "3e429df3-9f7f-40e4-8d73-d3c492104561"). InnerVolumeSpecName "kube-api-access-785dj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:28:27 crc kubenswrapper[5070]: I1213 03:28:27.333484 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3e429df3-9f7f-40e4-8d73-d3c492104561-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "3e429df3-9f7f-40e4-8d73-d3c492104561" (UID: "3e429df3-9f7f-40e4-8d73-d3c492104561"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:28:27 crc kubenswrapper[5070]: I1213 03:28:27.333954 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3e429df3-9f7f-40e4-8d73-d3c492104561-config" (OuterVolumeSpecName: "config") pod "3e429df3-9f7f-40e4-8d73-d3c492104561" (UID: "3e429df3-9f7f-40e4-8d73-d3c492104561"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:28:27 crc kubenswrapper[5070]: I1213 03:28:27.394336 5070 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3e429df3-9f7f-40e4-8d73-d3c492104561-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 13 03:28:27 crc kubenswrapper[5070]: I1213 03:28:27.394364 5070 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3e429df3-9f7f-40e4-8d73-d3c492104561-config\") on node \"crc\" DevicePath \"\"" Dec 13 03:28:27 crc kubenswrapper[5070]: I1213 03:28:27.394378 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-785dj\" (UniqueName: \"kubernetes.io/projected/3e429df3-9f7f-40e4-8d73-d3c492104561-kube-api-access-785dj\") on node \"crc\" DevicePath \"\"" Dec 13 03:28:27 crc kubenswrapper[5070]: I1213 03:28:27.824591 5070 generic.go:334] "Generic (PLEG): container finished" podID="3e429df3-9f7f-40e4-8d73-d3c492104561" containerID="03161968d3fe0c2240cf4a2f3f13a331e93aeb737ad26bc8b3ebd7c8f6f5a713" exitCode=0 Dec 13 03:28:27 crc kubenswrapper[5070]: I1213 03:28:27.824671 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-7z824" event={"ID":"3e429df3-9f7f-40e4-8d73-d3c492104561","Type":"ContainerDied","Data":"03161968d3fe0c2240cf4a2f3f13a331e93aeb737ad26bc8b3ebd7c8f6f5a713"} Dec 13 03:28:27 crc kubenswrapper[5070]: I1213 03:28:27.824969 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-7z824" event={"ID":"3e429df3-9f7f-40e4-8d73-d3c492104561","Type":"ContainerDied","Data":"c4350282aa912190059cf07358c2309b3112f2566a616c4b853446881f3055cb"} Dec 13 03:28:27 crc kubenswrapper[5070]: I1213 03:28:27.824681 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-7z824" Dec 13 03:28:27 crc kubenswrapper[5070]: I1213 03:28:27.825006 5070 scope.go:117] "RemoveContainer" containerID="03161968d3fe0c2240cf4a2f3f13a331e93aeb737ad26bc8b3ebd7c8f6f5a713" Dec 13 03:28:27 crc kubenswrapper[5070]: I1213 03:28:27.829783 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-zmbl5" event={"ID":"5f8ec385-4f69-4645-95d2-2d854b3fac57","Type":"ContainerStarted","Data":"1c4ede85f675f0f8e9bdb814ccdf81b0c55fa753d7d923d5a9a50e124a910734"} Dec 13 03:28:27 crc kubenswrapper[5070]: I1213 03:28:27.829834 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-zmbl5" event={"ID":"5f8ec385-4f69-4645-95d2-2d854b3fac57","Type":"ContainerStarted","Data":"56ed743365f09081cbc91104896377400bb98097b9425149879031777a5307af"} Dec 13 03:28:27 crc kubenswrapper[5070]: I1213 03:28:27.887928 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-ovs-zmbl5" podStartSLOduration=20.383188543 podStartE2EDuration="24.887896088s" podCreationTimestamp="2025-12-13 03:28:03 +0000 UTC" firstStartedPulling="2025-12-13 03:28:20.802611735 +0000 UTC m=+993.038455281" lastFinishedPulling="2025-12-13 03:28:25.30731928 +0000 UTC m=+997.543162826" observedRunningTime="2025-12-13 03:28:27.855704408 +0000 UTC m=+1000.091547964" watchObservedRunningTime="2025-12-13 03:28:27.887896088 +0000 UTC m=+1000.123739674" Dec 13 03:28:27 crc kubenswrapper[5070]: I1213 03:28:27.891477 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-7z824"] Dec 13 03:28:27 crc kubenswrapper[5070]: I1213 03:28:27.899843 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-7z824"] Dec 13 03:28:27 crc kubenswrapper[5070]: I1213 03:28:27.937944 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-galera-0" Dec 13 03:28:27 crc kubenswrapper[5070]: I1213 03:28:27.937993 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0" Dec 13 03:28:28 crc kubenswrapper[5070]: I1213 03:28:28.184031 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3e429df3-9f7f-40e4-8d73-d3c492104561" path="/var/lib/kubelet/pods/3e429df3-9f7f-40e4-8d73-d3c492104561/volumes" Dec 13 03:28:28 crc kubenswrapper[5070]: I1213 03:28:28.837098 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-zmbl5" Dec 13 03:28:28 crc kubenswrapper[5070]: I1213 03:28:28.837147 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-zmbl5" Dec 13 03:28:29 crc kubenswrapper[5070]: I1213 03:28:29.358877 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0" Dec 13 03:28:29 crc kubenswrapper[5070]: I1213 03:28:29.359298 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0" Dec 13 03:28:29 crc kubenswrapper[5070]: I1213 03:28:29.400024 5070 scope.go:117] "RemoveContainer" containerID="918f92df719c86e4cdd762c8a68c169f531051aafdebc64be697330a064552b7" Dec 13 03:28:29 crc kubenswrapper[5070]: I1213 03:28:29.458416 5070 scope.go:117] "RemoveContainer" containerID="03161968d3fe0c2240cf4a2f3f13a331e93aeb737ad26bc8b3ebd7c8f6f5a713" Dec 13 03:28:29 crc kubenswrapper[5070]: E1213 03:28:29.458711 5070 log.go:32] 
"ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"03161968d3fe0c2240cf4a2f3f13a331e93aeb737ad26bc8b3ebd7c8f6f5a713\": container with ID starting with 03161968d3fe0c2240cf4a2f3f13a331e93aeb737ad26bc8b3ebd7c8f6f5a713 not found: ID does not exist" containerID="03161968d3fe0c2240cf4a2f3f13a331e93aeb737ad26bc8b3ebd7c8f6f5a713" Dec 13 03:28:29 crc kubenswrapper[5070]: I1213 03:28:29.458740 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"03161968d3fe0c2240cf4a2f3f13a331e93aeb737ad26bc8b3ebd7c8f6f5a713"} err="failed to get container status \"03161968d3fe0c2240cf4a2f3f13a331e93aeb737ad26bc8b3ebd7c8f6f5a713\": rpc error: code = NotFound desc = could not find container \"03161968d3fe0c2240cf4a2f3f13a331e93aeb737ad26bc8b3ebd7c8f6f5a713\": container with ID starting with 03161968d3fe0c2240cf4a2f3f13a331e93aeb737ad26bc8b3ebd7c8f6f5a713 not found: ID does not exist" Dec 13 03:28:29 crc kubenswrapper[5070]: I1213 03:28:29.458761 5070 scope.go:117] "RemoveContainer" containerID="918f92df719c86e4cdd762c8a68c169f531051aafdebc64be697330a064552b7" Dec 13 03:28:29 crc kubenswrapper[5070]: E1213 03:28:29.459010 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"918f92df719c86e4cdd762c8a68c169f531051aafdebc64be697330a064552b7\": container with ID starting with 918f92df719c86e4cdd762c8a68c169f531051aafdebc64be697330a064552b7 not found: ID does not exist" containerID="918f92df719c86e4cdd762c8a68c169f531051aafdebc64be697330a064552b7" Dec 13 03:28:29 crc kubenswrapper[5070]: I1213 03:28:29.459045 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"918f92df719c86e4cdd762c8a68c169f531051aafdebc64be697330a064552b7"} err="failed to get container status \"918f92df719c86e4cdd762c8a68c169f531051aafdebc64be697330a064552b7\": rpc error: code = NotFound desc = could not find container \"918f92df719c86e4cdd762c8a68c169f531051aafdebc64be697330a064552b7\": container with ID starting with 918f92df719c86e4cdd762c8a68c169f531051aafdebc64be697330a064552b7 not found: ID does not exist" Dec 13 03:28:29 crc kubenswrapper[5070]: I1213 03:28:29.850818 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"037626c8-04bb-4af4-a5f3-309c3c174f98","Type":"ContainerStarted","Data":"0d9f320b4dba8706ddbdc396e19423948e9fb3af0041f87d365e0ec24c983e15"} Dec 13 03:28:29 crc kubenswrapper[5070]: I1213 03:28:29.855944 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"077843d3-f8ee-476e-b18c-da48a3a2f200","Type":"ContainerStarted","Data":"202f74d4c4b8562fbad3aeb4fb6255c2587ec0b5a51ea0db7d07763911f979c6"} Dec 13 03:28:29 crc kubenswrapper[5070]: I1213 03:28:29.906785 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-0" podStartSLOduration=14.127551191 podStartE2EDuration="22.906762284s" podCreationTimestamp="2025-12-13 03:28:07 +0000 UTC" firstStartedPulling="2025-12-13 03:28:20.73986021 +0000 UTC m=+992.975703756" lastFinishedPulling="2025-12-13 03:28:29.519071303 +0000 UTC m=+1001.754914849" observedRunningTime="2025-12-13 03:28:29.901403197 +0000 UTC m=+1002.137246763" watchObservedRunningTime="2025-12-13 03:28:29.906762284 +0000 UTC m=+1002.142605840" Dec 13 03:28:29 crc kubenswrapper[5070]: I1213 03:28:29.907639 5070 pod_startup_latency_tracker.go:104] "Observed pod 
startup duration" pod="openstack/ovsdbserver-nb-0" podStartSLOduration=17.144443307 podStartE2EDuration="24.907626767s" podCreationTimestamp="2025-12-13 03:28:05 +0000 UTC" firstStartedPulling="2025-12-13 03:28:21.702938187 +0000 UTC m=+993.938781733" lastFinishedPulling="2025-12-13 03:28:29.466121647 +0000 UTC m=+1001.701965193" observedRunningTime="2025-12-13 03:28:29.880148076 +0000 UTC m=+1002.115991622" watchObservedRunningTime="2025-12-13 03:28:29.907626767 +0000 UTC m=+1002.143470323" Dec 13 03:28:30 crc kubenswrapper[5070]: I1213 03:28:30.504322 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-galera-0" Dec 13 03:28:30 crc kubenswrapper[5070]: I1213 03:28:30.551001 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-galera-0" Dec 13 03:28:30 crc kubenswrapper[5070]: I1213 03:28:30.787869 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-0" Dec 13 03:28:30 crc kubenswrapper[5070]: I1213 03:28:30.849047 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-0" Dec 13 03:28:30 crc kubenswrapper[5070]: I1213 03:28:30.864295 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0" Dec 13 03:28:30 crc kubenswrapper[5070]: I1213 03:28:30.911068 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-0" Dec 13 03:28:31 crc kubenswrapper[5070]: I1213 03:28:31.170420 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5bf47b49b7-t24wc"] Dec 13 03:28:31 crc kubenswrapper[5070]: E1213 03:28:31.171090 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e429df3-9f7f-40e4-8d73-d3c492104561" containerName="init" Dec 13 03:28:31 crc kubenswrapper[5070]: I1213 03:28:31.171106 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e429df3-9f7f-40e4-8d73-d3c492104561" containerName="init" Dec 13 03:28:31 crc kubenswrapper[5070]: E1213 03:28:31.171134 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e429df3-9f7f-40e4-8d73-d3c492104561" containerName="dnsmasq-dns" Dec 13 03:28:31 crc kubenswrapper[5070]: I1213 03:28:31.171142 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e429df3-9f7f-40e4-8d73-d3c492104561" containerName="dnsmasq-dns" Dec 13 03:28:31 crc kubenswrapper[5070]: I1213 03:28:31.171356 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="3e429df3-9f7f-40e4-8d73-d3c492104561" containerName="dnsmasq-dns" Dec 13 03:28:31 crc kubenswrapper[5070]: I1213 03:28:31.173382 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5bf47b49b7-t24wc" Dec 13 03:28:31 crc kubenswrapper[5070]: I1213 03:28:31.175801 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb" Dec 13 03:28:31 crc kubenswrapper[5070]: I1213 03:28:31.187190 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5bf47b49b7-t24wc"] Dec 13 03:28:31 crc kubenswrapper[5070]: I1213 03:28:31.235962 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9zjv4\" (UniqueName: \"kubernetes.io/projected/9147c924-b927-4947-a59b-b3d89dfd6f89-kube-api-access-9zjv4\") pod \"dnsmasq-dns-5bf47b49b7-t24wc\" (UID: \"9147c924-b927-4947-a59b-b3d89dfd6f89\") " pod="openstack/dnsmasq-dns-5bf47b49b7-t24wc" Dec 13 03:28:31 crc kubenswrapper[5070]: I1213 03:28:31.236096 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9147c924-b927-4947-a59b-b3d89dfd6f89-config\") pod \"dnsmasq-dns-5bf47b49b7-t24wc\" (UID: \"9147c924-b927-4947-a59b-b3d89dfd6f89\") " pod="openstack/dnsmasq-dns-5bf47b49b7-t24wc" Dec 13 03:28:31 crc kubenswrapper[5070]: I1213 03:28:31.236229 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9147c924-b927-4947-a59b-b3d89dfd6f89-dns-svc\") pod \"dnsmasq-dns-5bf47b49b7-t24wc\" (UID: \"9147c924-b927-4947-a59b-b3d89dfd6f89\") " pod="openstack/dnsmasq-dns-5bf47b49b7-t24wc" Dec 13 03:28:31 crc kubenswrapper[5070]: I1213 03:28:31.236345 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9147c924-b927-4947-a59b-b3d89dfd6f89-ovsdbserver-nb\") pod \"dnsmasq-dns-5bf47b49b7-t24wc\" (UID: \"9147c924-b927-4947-a59b-b3d89dfd6f89\") " pod="openstack/dnsmasq-dns-5bf47b49b7-t24wc" Dec 13 03:28:31 crc kubenswrapper[5070]: I1213 03:28:31.277291 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-metrics-dvs7b"] Dec 13 03:28:31 crc kubenswrapper[5070]: I1213 03:28:31.278284 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-metrics-dvs7b" Dec 13 03:28:31 crc kubenswrapper[5070]: I1213 03:28:31.283028 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-metrics-config" Dec 13 03:28:31 crc kubenswrapper[5070]: I1213 03:28:31.286314 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Dec 13 03:28:31 crc kubenswrapper[5070]: I1213 03:28:31.288561 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-dvs7b"] Dec 13 03:28:31 crc kubenswrapper[5070]: I1213 03:28:31.337183 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9147c924-b927-4947-a59b-b3d89dfd6f89-config\") pod \"dnsmasq-dns-5bf47b49b7-t24wc\" (UID: \"9147c924-b927-4947-a59b-b3d89dfd6f89\") " pod="openstack/dnsmasq-dns-5bf47b49b7-t24wc" Dec 13 03:28:31 crc kubenswrapper[5070]: I1213 03:28:31.337234 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/30ad4728-1762-464a-a3ab-a24923973e0e-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-dvs7b\" (UID: \"30ad4728-1762-464a-a3ab-a24923973e0e\") " pod="openstack/ovn-controller-metrics-dvs7b" Dec 13 03:28:31 crc kubenswrapper[5070]: I1213 03:28:31.337268 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q2b2c\" (UniqueName: \"kubernetes.io/projected/30ad4728-1762-464a-a3ab-a24923973e0e-kube-api-access-q2b2c\") pod \"ovn-controller-metrics-dvs7b\" (UID: \"30ad4728-1762-464a-a3ab-a24923973e0e\") " pod="openstack/ovn-controller-metrics-dvs7b" Dec 13 03:28:31 crc kubenswrapper[5070]: I1213 03:28:31.337292 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9147c924-b927-4947-a59b-b3d89dfd6f89-dns-svc\") pod \"dnsmasq-dns-5bf47b49b7-t24wc\" (UID: \"9147c924-b927-4947-a59b-b3d89dfd6f89\") " pod="openstack/dnsmasq-dns-5bf47b49b7-t24wc" Dec 13 03:28:31 crc kubenswrapper[5070]: I1213 03:28:31.337341 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/30ad4728-1762-464a-a3ab-a24923973e0e-ovn-rundir\") pod \"ovn-controller-metrics-dvs7b\" (UID: \"30ad4728-1762-464a-a3ab-a24923973e0e\") " pod="openstack/ovn-controller-metrics-dvs7b" Dec 13 03:28:31 crc kubenswrapper[5070]: I1213 03:28:31.337382 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/30ad4728-1762-464a-a3ab-a24923973e0e-ovs-rundir\") pod \"ovn-controller-metrics-dvs7b\" (UID: \"30ad4728-1762-464a-a3ab-a24923973e0e\") " pod="openstack/ovn-controller-metrics-dvs7b" Dec 13 03:28:31 crc kubenswrapper[5070]: I1213 03:28:31.337430 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9147c924-b927-4947-a59b-b3d89dfd6f89-ovsdbserver-nb\") pod \"dnsmasq-dns-5bf47b49b7-t24wc\" (UID: \"9147c924-b927-4947-a59b-b3d89dfd6f89\") " pod="openstack/dnsmasq-dns-5bf47b49b7-t24wc" Dec 13 03:28:31 crc kubenswrapper[5070]: I1213 03:28:31.337501 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/30ad4728-1762-464a-a3ab-a24923973e0e-config\") pod \"ovn-controller-metrics-dvs7b\" (UID: \"30ad4728-1762-464a-a3ab-a24923973e0e\") " pod="openstack/ovn-controller-metrics-dvs7b" Dec 13 03:28:31 crc kubenswrapper[5070]: I1213 03:28:31.337527 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30ad4728-1762-464a-a3ab-a24923973e0e-combined-ca-bundle\") pod \"ovn-controller-metrics-dvs7b\" (UID: \"30ad4728-1762-464a-a3ab-a24923973e0e\") " pod="openstack/ovn-controller-metrics-dvs7b" Dec 13 03:28:31 crc kubenswrapper[5070]: I1213 03:28:31.337713 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9zjv4\" (UniqueName: \"kubernetes.io/projected/9147c924-b927-4947-a59b-b3d89dfd6f89-kube-api-access-9zjv4\") pod \"dnsmasq-dns-5bf47b49b7-t24wc\" (UID: \"9147c924-b927-4947-a59b-b3d89dfd6f89\") " pod="openstack/dnsmasq-dns-5bf47b49b7-t24wc" Dec 13 03:28:31 crc kubenswrapper[5070]: I1213 03:28:31.339049 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9147c924-b927-4947-a59b-b3d89dfd6f89-dns-svc\") pod \"dnsmasq-dns-5bf47b49b7-t24wc\" (UID: \"9147c924-b927-4947-a59b-b3d89dfd6f89\") " pod="openstack/dnsmasq-dns-5bf47b49b7-t24wc" Dec 13 03:28:31 crc kubenswrapper[5070]: I1213 03:28:31.339412 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9147c924-b927-4947-a59b-b3d89dfd6f89-config\") pod \"dnsmasq-dns-5bf47b49b7-t24wc\" (UID: \"9147c924-b927-4947-a59b-b3d89dfd6f89\") " pod="openstack/dnsmasq-dns-5bf47b49b7-t24wc" Dec 13 03:28:31 crc kubenswrapper[5070]: I1213 03:28:31.339621 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9147c924-b927-4947-a59b-b3d89dfd6f89-ovsdbserver-nb\") pod \"dnsmasq-dns-5bf47b49b7-t24wc\" (UID: \"9147c924-b927-4947-a59b-b3d89dfd6f89\") " pod="openstack/dnsmasq-dns-5bf47b49b7-t24wc" Dec 13 03:28:31 crc kubenswrapper[5070]: I1213 03:28:31.360239 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9zjv4\" (UniqueName: \"kubernetes.io/projected/9147c924-b927-4947-a59b-b3d89dfd6f89-kube-api-access-9zjv4\") pod \"dnsmasq-dns-5bf47b49b7-t24wc\" (UID: \"9147c924-b927-4947-a59b-b3d89dfd6f89\") " pod="openstack/dnsmasq-dns-5bf47b49b7-t24wc" Dec 13 03:28:31 crc kubenswrapper[5070]: I1213 03:28:31.440911 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/30ad4728-1762-464a-a3ab-a24923973e0e-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-dvs7b\" (UID: \"30ad4728-1762-464a-a3ab-a24923973e0e\") " pod="openstack/ovn-controller-metrics-dvs7b" Dec 13 03:28:31 crc kubenswrapper[5070]: I1213 03:28:31.440967 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q2b2c\" (UniqueName: \"kubernetes.io/projected/30ad4728-1762-464a-a3ab-a24923973e0e-kube-api-access-q2b2c\") pod \"ovn-controller-metrics-dvs7b\" (UID: \"30ad4728-1762-464a-a3ab-a24923973e0e\") " pod="openstack/ovn-controller-metrics-dvs7b" Dec 13 03:28:31 crc kubenswrapper[5070]: I1213 03:28:31.441020 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: 
\"kubernetes.io/host-path/30ad4728-1762-464a-a3ab-a24923973e0e-ovn-rundir\") pod \"ovn-controller-metrics-dvs7b\" (UID: \"30ad4728-1762-464a-a3ab-a24923973e0e\") " pod="openstack/ovn-controller-metrics-dvs7b" Dec 13 03:28:31 crc kubenswrapper[5070]: I1213 03:28:31.441050 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/30ad4728-1762-464a-a3ab-a24923973e0e-ovs-rundir\") pod \"ovn-controller-metrics-dvs7b\" (UID: \"30ad4728-1762-464a-a3ab-a24923973e0e\") " pod="openstack/ovn-controller-metrics-dvs7b" Dec 13 03:28:31 crc kubenswrapper[5070]: I1213 03:28:31.441104 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/30ad4728-1762-464a-a3ab-a24923973e0e-config\") pod \"ovn-controller-metrics-dvs7b\" (UID: \"30ad4728-1762-464a-a3ab-a24923973e0e\") " pod="openstack/ovn-controller-metrics-dvs7b" Dec 13 03:28:31 crc kubenswrapper[5070]: I1213 03:28:31.441135 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30ad4728-1762-464a-a3ab-a24923973e0e-combined-ca-bundle\") pod \"ovn-controller-metrics-dvs7b\" (UID: \"30ad4728-1762-464a-a3ab-a24923973e0e\") " pod="openstack/ovn-controller-metrics-dvs7b" Dec 13 03:28:31 crc kubenswrapper[5070]: I1213 03:28:31.441413 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/30ad4728-1762-464a-a3ab-a24923973e0e-ovn-rundir\") pod \"ovn-controller-metrics-dvs7b\" (UID: \"30ad4728-1762-464a-a3ab-a24923973e0e\") " pod="openstack/ovn-controller-metrics-dvs7b" Dec 13 03:28:31 crc kubenswrapper[5070]: I1213 03:28:31.441413 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/30ad4728-1762-464a-a3ab-a24923973e0e-ovs-rundir\") pod \"ovn-controller-metrics-dvs7b\" (UID: \"30ad4728-1762-464a-a3ab-a24923973e0e\") " pod="openstack/ovn-controller-metrics-dvs7b" Dec 13 03:28:31 crc kubenswrapper[5070]: I1213 03:28:31.443132 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/30ad4728-1762-464a-a3ab-a24923973e0e-config\") pod \"ovn-controller-metrics-dvs7b\" (UID: \"30ad4728-1762-464a-a3ab-a24923973e0e\") " pod="openstack/ovn-controller-metrics-dvs7b" Dec 13 03:28:31 crc kubenswrapper[5070]: I1213 03:28:31.449754 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/30ad4728-1762-464a-a3ab-a24923973e0e-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-dvs7b\" (UID: \"30ad4728-1762-464a-a3ab-a24923973e0e\") " pod="openstack/ovn-controller-metrics-dvs7b" Dec 13 03:28:31 crc kubenswrapper[5070]: I1213 03:28:31.450272 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30ad4728-1762-464a-a3ab-a24923973e0e-combined-ca-bundle\") pod \"ovn-controller-metrics-dvs7b\" (UID: \"30ad4728-1762-464a-a3ab-a24923973e0e\") " pod="openstack/ovn-controller-metrics-dvs7b" Dec 13 03:28:31 crc kubenswrapper[5070]: I1213 03:28:31.456203 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q2b2c\" (UniqueName: \"kubernetes.io/projected/30ad4728-1762-464a-a3ab-a24923973e0e-kube-api-access-q2b2c\") pod \"ovn-controller-metrics-dvs7b\" 
(UID: \"30ad4728-1762-464a-a3ab-a24923973e0e\") " pod="openstack/ovn-controller-metrics-dvs7b" Dec 13 03:28:31 crc kubenswrapper[5070]: I1213 03:28:31.498839 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5bf47b49b7-t24wc" Dec 13 03:28:31 crc kubenswrapper[5070]: I1213 03:28:31.600108 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-dvs7b" Dec 13 03:28:31 crc kubenswrapper[5070]: I1213 03:28:31.685187 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5bf47b49b7-t24wc"] Dec 13 03:28:31 crc kubenswrapper[5070]: I1213 03:28:31.781875 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-8554648995-dtgg8"] Dec 13 03:28:31 crc kubenswrapper[5070]: I1213 03:28:31.783461 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8554648995-dtgg8" Dec 13 03:28:31 crc kubenswrapper[5070]: I1213 03:28:31.786813 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb" Dec 13 03:28:31 crc kubenswrapper[5070]: I1213 03:28:31.791992 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8554648995-dtgg8"] Dec 13 03:28:31 crc kubenswrapper[5070]: I1213 03:28:31.858546 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9368a062-8e24-48ec-9a22-c29f1c906361-ovsdbserver-nb\") pod \"dnsmasq-dns-8554648995-dtgg8\" (UID: \"9368a062-8e24-48ec-9a22-c29f1c906361\") " pod="openstack/dnsmasq-dns-8554648995-dtgg8" Dec 13 03:28:31 crc kubenswrapper[5070]: I1213 03:28:31.858853 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9368a062-8e24-48ec-9a22-c29f1c906361-config\") pod \"dnsmasq-dns-8554648995-dtgg8\" (UID: \"9368a062-8e24-48ec-9a22-c29f1c906361\") " pod="openstack/dnsmasq-dns-8554648995-dtgg8" Dec 13 03:28:31 crc kubenswrapper[5070]: I1213 03:28:31.858885 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9368a062-8e24-48ec-9a22-c29f1c906361-dns-svc\") pod \"dnsmasq-dns-8554648995-dtgg8\" (UID: \"9368a062-8e24-48ec-9a22-c29f1c906361\") " pod="openstack/dnsmasq-dns-8554648995-dtgg8" Dec 13 03:28:31 crc kubenswrapper[5070]: I1213 03:28:31.858908 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-456jz\" (UniqueName: \"kubernetes.io/projected/9368a062-8e24-48ec-9a22-c29f1c906361-kube-api-access-456jz\") pod \"dnsmasq-dns-8554648995-dtgg8\" (UID: \"9368a062-8e24-48ec-9a22-c29f1c906361\") " pod="openstack/dnsmasq-dns-8554648995-dtgg8" Dec 13 03:28:31 crc kubenswrapper[5070]: I1213 03:28:31.858943 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9368a062-8e24-48ec-9a22-c29f1c906361-ovsdbserver-sb\") pod \"dnsmasq-dns-8554648995-dtgg8\" (UID: \"9368a062-8e24-48ec-9a22-c29f1c906361\") " pod="openstack/dnsmasq-dns-8554648995-dtgg8" Dec 13 03:28:31 crc kubenswrapper[5070]: I1213 03:28:31.961345 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: 
\"kubernetes.io/configmap/9368a062-8e24-48ec-9a22-c29f1c906361-ovsdbserver-nb\") pod \"dnsmasq-dns-8554648995-dtgg8\" (UID: \"9368a062-8e24-48ec-9a22-c29f1c906361\") " pod="openstack/dnsmasq-dns-8554648995-dtgg8" Dec 13 03:28:31 crc kubenswrapper[5070]: I1213 03:28:31.961717 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9368a062-8e24-48ec-9a22-c29f1c906361-config\") pod \"dnsmasq-dns-8554648995-dtgg8\" (UID: \"9368a062-8e24-48ec-9a22-c29f1c906361\") " pod="openstack/dnsmasq-dns-8554648995-dtgg8" Dec 13 03:28:31 crc kubenswrapper[5070]: I1213 03:28:31.961751 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9368a062-8e24-48ec-9a22-c29f1c906361-dns-svc\") pod \"dnsmasq-dns-8554648995-dtgg8\" (UID: \"9368a062-8e24-48ec-9a22-c29f1c906361\") " pod="openstack/dnsmasq-dns-8554648995-dtgg8" Dec 13 03:28:31 crc kubenswrapper[5070]: I1213 03:28:31.961803 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-456jz\" (UniqueName: \"kubernetes.io/projected/9368a062-8e24-48ec-9a22-c29f1c906361-kube-api-access-456jz\") pod \"dnsmasq-dns-8554648995-dtgg8\" (UID: \"9368a062-8e24-48ec-9a22-c29f1c906361\") " pod="openstack/dnsmasq-dns-8554648995-dtgg8" Dec 13 03:28:31 crc kubenswrapper[5070]: I1213 03:28:31.961846 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9368a062-8e24-48ec-9a22-c29f1c906361-ovsdbserver-sb\") pod \"dnsmasq-dns-8554648995-dtgg8\" (UID: \"9368a062-8e24-48ec-9a22-c29f1c906361\") " pod="openstack/dnsmasq-dns-8554648995-dtgg8" Dec 13 03:28:31 crc kubenswrapper[5070]: I1213 03:28:31.962160 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9368a062-8e24-48ec-9a22-c29f1c906361-ovsdbserver-nb\") pod \"dnsmasq-dns-8554648995-dtgg8\" (UID: \"9368a062-8e24-48ec-9a22-c29f1c906361\") " pod="openstack/dnsmasq-dns-8554648995-dtgg8" Dec 13 03:28:31 crc kubenswrapper[5070]: I1213 03:28:31.962833 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9368a062-8e24-48ec-9a22-c29f1c906361-config\") pod \"dnsmasq-dns-8554648995-dtgg8\" (UID: \"9368a062-8e24-48ec-9a22-c29f1c906361\") " pod="openstack/dnsmasq-dns-8554648995-dtgg8" Dec 13 03:28:31 crc kubenswrapper[5070]: I1213 03:28:31.963336 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9368a062-8e24-48ec-9a22-c29f1c906361-ovsdbserver-sb\") pod \"dnsmasq-dns-8554648995-dtgg8\" (UID: \"9368a062-8e24-48ec-9a22-c29f1c906361\") " pod="openstack/dnsmasq-dns-8554648995-dtgg8" Dec 13 03:28:31 crc kubenswrapper[5070]: I1213 03:28:31.963355 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9368a062-8e24-48ec-9a22-c29f1c906361-dns-svc\") pod \"dnsmasq-dns-8554648995-dtgg8\" (UID: \"9368a062-8e24-48ec-9a22-c29f1c906361\") " pod="openstack/dnsmasq-dns-8554648995-dtgg8" Dec 13 03:28:31 crc kubenswrapper[5070]: I1213 03:28:31.990788 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-456jz\" (UniqueName: \"kubernetes.io/projected/9368a062-8e24-48ec-9a22-c29f1c906361-kube-api-access-456jz\") pod \"dnsmasq-dns-8554648995-dtgg8\" (UID: 
\"9368a062-8e24-48ec-9a22-c29f1c906361\") " pod="openstack/dnsmasq-dns-8554648995-dtgg8" Dec 13 03:28:32 crc kubenswrapper[5070]: I1213 03:28:32.132047 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8554648995-dtgg8" Dec 13 03:28:32 crc kubenswrapper[5070]: I1213 03:28:32.145858 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5bf47b49b7-t24wc"] Dec 13 03:28:32 crc kubenswrapper[5070]: I1213 03:28:32.386868 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8554648995-dtgg8"] Dec 13 03:28:32 crc kubenswrapper[5070]: I1213 03:28:32.396843 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-dvs7b"] Dec 13 03:28:32 crc kubenswrapper[5070]: W1213 03:28:32.445159 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9368a062_8e24_48ec_9a22_c29f1c906361.slice/crio-15f99c70ef592730938994014ec363d8845c01410abd4764b5d37e3de2c35578 WatchSource:0}: Error finding container 15f99c70ef592730938994014ec363d8845c01410abd4764b5d37e3de2c35578: Status 404 returned error can't find the container with id 15f99c70ef592730938994014ec363d8845c01410abd4764b5d37e3de2c35578 Dec 13 03:28:32 crc kubenswrapper[5070]: W1213 03:28:32.445490 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod30ad4728_1762_464a_a3ab_a24923973e0e.slice/crio-1d01e678e4132e084f8dddf55ddb007b1441cf7e255c59d6ef46a0fc81d71347 WatchSource:0}: Error finding container 1d01e678e4132e084f8dddf55ddb007b1441cf7e255c59d6ef46a0fc81d71347: Status 404 returned error can't find the container with id 1d01e678e4132e084f8dddf55ddb007b1441cf7e255c59d6ef46a0fc81d71347 Dec 13 03:28:32 crc kubenswrapper[5070]: I1213 03:28:32.582498 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-0" Dec 13 03:28:32 crc kubenswrapper[5070]: I1213 03:28:32.883176 5070 generic.go:334] "Generic (PLEG): container finished" podID="9368a062-8e24-48ec-9a22-c29f1c906361" containerID="2d08fe5232ac49969b29e9d870449b5c7698c02f1b3e06fdf45faf30448ecef2" exitCode=0 Dec 13 03:28:32 crc kubenswrapper[5070]: I1213 03:28:32.883278 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554648995-dtgg8" event={"ID":"9368a062-8e24-48ec-9a22-c29f1c906361","Type":"ContainerDied","Data":"2d08fe5232ac49969b29e9d870449b5c7698c02f1b3e06fdf45faf30448ecef2"} Dec 13 03:28:32 crc kubenswrapper[5070]: I1213 03:28:32.883316 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554648995-dtgg8" event={"ID":"9368a062-8e24-48ec-9a22-c29f1c906361","Type":"ContainerStarted","Data":"15f99c70ef592730938994014ec363d8845c01410abd4764b5d37e3de2c35578"} Dec 13 03:28:32 crc kubenswrapper[5070]: I1213 03:28:32.886131 5070 generic.go:334] "Generic (PLEG): container finished" podID="9147c924-b927-4947-a59b-b3d89dfd6f89" containerID="fa1ec22cc4d75591cc37a7d5ab0c8210411e38d7f1bec2d40493d4df5266ff8b" exitCode=0 Dec 13 03:28:32 crc kubenswrapper[5070]: I1213 03:28:32.886180 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bf47b49b7-t24wc" event={"ID":"9147c924-b927-4947-a59b-b3d89dfd6f89","Type":"ContainerDied","Data":"fa1ec22cc4d75591cc37a7d5ab0c8210411e38d7f1bec2d40493d4df5266ff8b"} Dec 13 03:28:32 crc kubenswrapper[5070]: I1213 03:28:32.886235 5070 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bf47b49b7-t24wc" event={"ID":"9147c924-b927-4947-a59b-b3d89dfd6f89","Type":"ContainerStarted","Data":"ee0e4232d74f9160de8e409a1813a5c2a2b9601e5a78831eebe67028eb25ab4c"} Dec 13 03:28:32 crc kubenswrapper[5070]: I1213 03:28:32.889457 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-dvs7b" event={"ID":"30ad4728-1762-464a-a3ab-a24923973e0e","Type":"ContainerStarted","Data":"0127ef9156a8647d18b7dccbb048cde059c7cb119ca73b83ddfb87076b64cb0b"} Dec 13 03:28:32 crc kubenswrapper[5070]: I1213 03:28:32.889499 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-dvs7b" event={"ID":"30ad4728-1762-464a-a3ab-a24923973e0e","Type":"ContainerStarted","Data":"1d01e678e4132e084f8dddf55ddb007b1441cf7e255c59d6ef46a0fc81d71347"} Dec 13 03:28:32 crc kubenswrapper[5070]: I1213 03:28:32.908654 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-0" Dec 13 03:28:32 crc kubenswrapper[5070]: I1213 03:28:32.909705 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-0" Dec 13 03:28:32 crc kubenswrapper[5070]: I1213 03:28:32.952377 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-metrics-dvs7b" podStartSLOduration=1.952356903 podStartE2EDuration="1.952356903s" podCreationTimestamp="2025-12-13 03:28:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 03:28:32.948054635 +0000 UTC m=+1005.183898191" watchObservedRunningTime="2025-12-13 03:28:32.952356903 +0000 UTC m=+1005.188200449" Dec 13 03:28:33 crc kubenswrapper[5070]: I1213 03:28:33.115274 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0" Dec 13 03:28:33 crc kubenswrapper[5070]: I1213 03:28:33.354930 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-northd-0"] Dec 13 03:28:33 crc kubenswrapper[5070]: I1213 03:28:33.356911 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0" Dec 13 03:28:33 crc kubenswrapper[5070]: I1213 03:28:33.360520 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts" Dec 13 03:28:33 crc kubenswrapper[5070]: I1213 03:28:33.360569 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-config" Dec 13 03:28:33 crc kubenswrapper[5070]: I1213 03:28:33.360716 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-g7x25" Dec 13 03:28:33 crc kubenswrapper[5070]: I1213 03:28:33.362302 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovnnorthd-ovndbs" Dec 13 03:28:33 crc kubenswrapper[5070]: I1213 03:28:33.386679 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Dec 13 03:28:33 crc kubenswrapper[5070]: I1213 03:28:33.410279 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5bf47b49b7-t24wc" Dec 13 03:28:33 crc kubenswrapper[5070]: I1213 03:28:33.441171 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0" Dec 13 03:28:33 crc kubenswrapper[5070]: I1213 03:28:33.491633 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0" Dec 13 03:28:33 crc kubenswrapper[5070]: I1213 03:28:33.518394 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9147c924-b927-4947-a59b-b3d89dfd6f89-ovsdbserver-nb\") pod \"9147c924-b927-4947-a59b-b3d89dfd6f89\" (UID: \"9147c924-b927-4947-a59b-b3d89dfd6f89\") " Dec 13 03:28:33 crc kubenswrapper[5070]: I1213 03:28:33.518508 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9zjv4\" (UniqueName: \"kubernetes.io/projected/9147c924-b927-4947-a59b-b3d89dfd6f89-kube-api-access-9zjv4\") pod \"9147c924-b927-4947-a59b-b3d89dfd6f89\" (UID: \"9147c924-b927-4947-a59b-b3d89dfd6f89\") " Dec 13 03:28:33 crc kubenswrapper[5070]: I1213 03:28:33.518545 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9147c924-b927-4947-a59b-b3d89dfd6f89-config\") pod \"9147c924-b927-4947-a59b-b3d89dfd6f89\" (UID: \"9147c924-b927-4947-a59b-b3d89dfd6f89\") " Dec 13 03:28:33 crc kubenswrapper[5070]: I1213 03:28:33.518701 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9147c924-b927-4947-a59b-b3d89dfd6f89-dns-svc\") pod \"9147c924-b927-4947-a59b-b3d89dfd6f89\" (UID: \"9147c924-b927-4947-a59b-b3d89dfd6f89\") " Dec 13 03:28:33 crc kubenswrapper[5070]: I1213 03:28:33.518893 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/35f8712b-8f90-45cb-a2c6-dbbcc7357542-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"35f8712b-8f90-45cb-a2c6-dbbcc7357542\") " pod="openstack/ovn-northd-0" Dec 13 03:28:33 crc kubenswrapper[5070]: I1213 03:28:33.518931 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/35f8712b-8f90-45cb-a2c6-dbbcc7357542-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"35f8712b-8f90-45cb-a2c6-dbbcc7357542\") " pod="openstack/ovn-northd-0" Dec 13 03:28:33 crc kubenswrapper[5070]: I1213 03:28:33.518956 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/35f8712b-8f90-45cb-a2c6-dbbcc7357542-config\") pod \"ovn-northd-0\" (UID: \"35f8712b-8f90-45cb-a2c6-dbbcc7357542\") " pod="openstack/ovn-northd-0" Dec 13 03:28:33 crc kubenswrapper[5070]: I1213 03:28:33.519012 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m78rc\" (UniqueName: \"kubernetes.io/projected/35f8712b-8f90-45cb-a2c6-dbbcc7357542-kube-api-access-m78rc\") pod \"ovn-northd-0\" (UID: \"35f8712b-8f90-45cb-a2c6-dbbcc7357542\") " pod="openstack/ovn-northd-0" Dec 13 03:28:33 crc kubenswrapper[5070]: I1213 03:28:33.519038 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/35f8712b-8f90-45cb-a2c6-dbbcc7357542-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"35f8712b-8f90-45cb-a2c6-dbbcc7357542\") " pod="openstack/ovn-northd-0" Dec 13 03:28:33 crc kubenswrapper[5070]: I1213 03:28:33.519058 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/35f8712b-8f90-45cb-a2c6-dbbcc7357542-scripts\") pod \"ovn-northd-0\" (UID: \"35f8712b-8f90-45cb-a2c6-dbbcc7357542\") " pod="openstack/ovn-northd-0" Dec 13 03:28:33 crc kubenswrapper[5070]: I1213 03:28:33.519083 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/35f8712b-8f90-45cb-a2c6-dbbcc7357542-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"35f8712b-8f90-45cb-a2c6-dbbcc7357542\") " pod="openstack/ovn-northd-0" Dec 13 03:28:33 crc kubenswrapper[5070]: I1213 03:28:33.527738 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9147c924-b927-4947-a59b-b3d89dfd6f89-kube-api-access-9zjv4" (OuterVolumeSpecName: "kube-api-access-9zjv4") pod "9147c924-b927-4947-a59b-b3d89dfd6f89" (UID: "9147c924-b927-4947-a59b-b3d89dfd6f89"). InnerVolumeSpecName "kube-api-access-9zjv4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:28:33 crc kubenswrapper[5070]: I1213 03:28:33.550296 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9147c924-b927-4947-a59b-b3d89dfd6f89-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "9147c924-b927-4947-a59b-b3d89dfd6f89" (UID: "9147c924-b927-4947-a59b-b3d89dfd6f89"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:28:33 crc kubenswrapper[5070]: I1213 03:28:33.552672 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9147c924-b927-4947-a59b-b3d89dfd6f89-config" (OuterVolumeSpecName: "config") pod "9147c924-b927-4947-a59b-b3d89dfd6f89" (UID: "9147c924-b927-4947-a59b-b3d89dfd6f89"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:28:33 crc kubenswrapper[5070]: I1213 03:28:33.576729 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9147c924-b927-4947-a59b-b3d89dfd6f89-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "9147c924-b927-4947-a59b-b3d89dfd6f89" (UID: "9147c924-b927-4947-a59b-b3d89dfd6f89"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:28:33 crc kubenswrapper[5070]: I1213 03:28:33.620083 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m78rc\" (UniqueName: \"kubernetes.io/projected/35f8712b-8f90-45cb-a2c6-dbbcc7357542-kube-api-access-m78rc\") pod \"ovn-northd-0\" (UID: \"35f8712b-8f90-45cb-a2c6-dbbcc7357542\") " pod="openstack/ovn-northd-0" Dec 13 03:28:33 crc kubenswrapper[5070]: I1213 03:28:33.620138 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/35f8712b-8f90-45cb-a2c6-dbbcc7357542-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"35f8712b-8f90-45cb-a2c6-dbbcc7357542\") " pod="openstack/ovn-northd-0" Dec 13 03:28:33 crc kubenswrapper[5070]: I1213 03:28:33.620161 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/35f8712b-8f90-45cb-a2c6-dbbcc7357542-scripts\") pod \"ovn-northd-0\" (UID: \"35f8712b-8f90-45cb-a2c6-dbbcc7357542\") " pod="openstack/ovn-northd-0" Dec 13 03:28:33 crc kubenswrapper[5070]: I1213 03:28:33.620198 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/35f8712b-8f90-45cb-a2c6-dbbcc7357542-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"35f8712b-8f90-45cb-a2c6-dbbcc7357542\") " pod="openstack/ovn-northd-0" Dec 13 03:28:33 crc kubenswrapper[5070]: I1213 03:28:33.620241 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/35f8712b-8f90-45cb-a2c6-dbbcc7357542-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"35f8712b-8f90-45cb-a2c6-dbbcc7357542\") " pod="openstack/ovn-northd-0" Dec 13 03:28:33 crc kubenswrapper[5070]: I1213 03:28:33.620293 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/35f8712b-8f90-45cb-a2c6-dbbcc7357542-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"35f8712b-8f90-45cb-a2c6-dbbcc7357542\") " pod="openstack/ovn-northd-0" Dec 13 03:28:33 crc kubenswrapper[5070]: I1213 03:28:33.620316 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/35f8712b-8f90-45cb-a2c6-dbbcc7357542-config\") pod \"ovn-northd-0\" (UID: \"35f8712b-8f90-45cb-a2c6-dbbcc7357542\") " pod="openstack/ovn-northd-0" Dec 13 03:28:33 crc kubenswrapper[5070]: I1213 03:28:33.620469 5070 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9147c924-b927-4947-a59b-b3d89dfd6f89-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 13 03:28:33 crc kubenswrapper[5070]: I1213 03:28:33.620487 5070 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9147c924-b927-4947-a59b-b3d89dfd6f89-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 13 03:28:33 crc kubenswrapper[5070]: I1213 03:28:33.620502 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9zjv4\" (UniqueName: \"kubernetes.io/projected/9147c924-b927-4947-a59b-b3d89dfd6f89-kube-api-access-9zjv4\") on node \"crc\" DevicePath \"\"" Dec 13 03:28:33 crc kubenswrapper[5070]: I1213 03:28:33.620513 5070 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/9147c924-b927-4947-a59b-b3d89dfd6f89-config\") on node \"crc\" DevicePath \"\"" Dec 13 03:28:33 crc kubenswrapper[5070]: I1213 03:28:33.622016 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/35f8712b-8f90-45cb-a2c6-dbbcc7357542-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"35f8712b-8f90-45cb-a2c6-dbbcc7357542\") " pod="openstack/ovn-northd-0" Dec 13 03:28:33 crc kubenswrapper[5070]: I1213 03:28:33.623725 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/35f8712b-8f90-45cb-a2c6-dbbcc7357542-scripts\") pod \"ovn-northd-0\" (UID: \"35f8712b-8f90-45cb-a2c6-dbbcc7357542\") " pod="openstack/ovn-northd-0" Dec 13 03:28:33 crc kubenswrapper[5070]: I1213 03:28:33.624411 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/35f8712b-8f90-45cb-a2c6-dbbcc7357542-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"35f8712b-8f90-45cb-a2c6-dbbcc7357542\") " pod="openstack/ovn-northd-0" Dec 13 03:28:33 crc kubenswrapper[5070]: I1213 03:28:33.626115 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/35f8712b-8f90-45cb-a2c6-dbbcc7357542-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"35f8712b-8f90-45cb-a2c6-dbbcc7357542\") " pod="openstack/ovn-northd-0" Dec 13 03:28:33 crc kubenswrapper[5070]: I1213 03:28:33.629136 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/35f8712b-8f90-45cb-a2c6-dbbcc7357542-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"35f8712b-8f90-45cb-a2c6-dbbcc7357542\") " pod="openstack/ovn-northd-0" Dec 13 03:28:33 crc kubenswrapper[5070]: I1213 03:28:33.630348 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/35f8712b-8f90-45cb-a2c6-dbbcc7357542-config\") pod \"ovn-northd-0\" (UID: \"35f8712b-8f90-45cb-a2c6-dbbcc7357542\") " pod="openstack/ovn-northd-0" Dec 13 03:28:33 crc kubenswrapper[5070]: I1213 03:28:33.636780 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m78rc\" (UniqueName: \"kubernetes.io/projected/35f8712b-8f90-45cb-a2c6-dbbcc7357542-kube-api-access-m78rc\") pod \"ovn-northd-0\" (UID: \"35f8712b-8f90-45cb-a2c6-dbbcc7357542\") " pod="openstack/ovn-northd-0" Dec 13 03:28:33 crc kubenswrapper[5070]: I1213 03:28:33.672650 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-northd-0" Dec 13 03:28:33 crc kubenswrapper[5070]: I1213 03:28:33.902250 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"d669856e-7406-451a-825e-9de1fc76f8b2","Type":"ContainerStarted","Data":"a06d4aee3203cbcbcb26562e632af6ca69d14932f595072336c8a998466e2782"} Dec 13 03:28:33 crc kubenswrapper[5070]: I1213 03:28:33.904643 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bf47b49b7-t24wc" event={"ID":"9147c924-b927-4947-a59b-b3d89dfd6f89","Type":"ContainerDied","Data":"ee0e4232d74f9160de8e409a1813a5c2a2b9601e5a78831eebe67028eb25ab4c"} Dec 13 03:28:33 crc kubenswrapper[5070]: I1213 03:28:33.904708 5070 scope.go:117] "RemoveContainer" containerID="fa1ec22cc4d75591cc37a7d5ab0c8210411e38d7f1bec2d40493d4df5266ff8b" Dec 13 03:28:33 crc kubenswrapper[5070]: I1213 03:28:33.904703 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5bf47b49b7-t24wc" Dec 13 03:28:33 crc kubenswrapper[5070]: I1213 03:28:33.908316 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554648995-dtgg8" event={"ID":"9368a062-8e24-48ec-9a22-c29f1c906361","Type":"ContainerStarted","Data":"7fe6a58a855d6aaf3a03c7fa8a216360fc41ad225d4f91a8de791013424fccca"} Dec 13 03:28:33 crc kubenswrapper[5070]: I1213 03:28:33.908507 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-8554648995-dtgg8" Dec 13 03:28:33 crc kubenswrapper[5070]: I1213 03:28:33.910980 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"9dd13bd9-bfbd-4f80-b334-d8b959a6187d","Type":"ContainerStarted","Data":"6c4ef47d6ecc0948e5e89e4b0a9e280fb1e18267e47c12f63307114b9cf1d371"} Dec 13 03:28:34 crc kubenswrapper[5070]: I1213 03:28:34.003781 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-8554648995-dtgg8" podStartSLOduration=3.003762912 podStartE2EDuration="3.003762912s" podCreationTimestamp="2025-12-13 03:28:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 03:28:33.969018043 +0000 UTC m=+1006.204861599" watchObservedRunningTime="2025-12-13 03:28:34.003762912 +0000 UTC m=+1006.239606458" Dec 13 03:28:34 crc kubenswrapper[5070]: I1213 03:28:34.010817 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5bf47b49b7-t24wc"] Dec 13 03:28:34 crc kubenswrapper[5070]: I1213 03:28:34.015933 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5bf47b49b7-t24wc"] Dec 13 03:28:34 crc kubenswrapper[5070]: I1213 03:28:34.089297 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Dec 13 03:28:34 crc kubenswrapper[5070]: I1213 03:28:34.176331 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9147c924-b927-4947-a59b-b3d89dfd6f89" path="/var/lib/kubelet/pods/9147c924-b927-4947-a59b-b3d89dfd6f89/volumes" Dec 13 03:28:34 crc kubenswrapper[5070]: I1213 03:28:34.664744 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/memcached-0" Dec 13 03:28:34 crc kubenswrapper[5070]: I1213 03:28:34.917354 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-create-hsxvf"] Dec 13 03:28:34 crc kubenswrapper[5070]: E1213 03:28:34.918043 5070 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="9147c924-b927-4947-a59b-b3d89dfd6f89" containerName="init" Dec 13 03:28:34 crc kubenswrapper[5070]: I1213 03:28:34.918063 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="9147c924-b927-4947-a59b-b3d89dfd6f89" containerName="init" Dec 13 03:28:34 crc kubenswrapper[5070]: I1213 03:28:34.918272 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="9147c924-b927-4947-a59b-b3d89dfd6f89" containerName="init" Dec 13 03:28:34 crc kubenswrapper[5070]: I1213 03:28:34.918792 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-hsxvf" Dec 13 03:28:34 crc kubenswrapper[5070]: I1213 03:28:34.925230 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-hsxvf"] Dec 13 03:28:34 crc kubenswrapper[5070]: I1213 03:28:34.958612 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"35f8712b-8f90-45cb-a2c6-dbbcc7357542","Type":"ContainerStarted","Data":"6d81f14efc2038eda55c78fd1ba78edd7ddfb42f3a58393f50e7a7140cfda872"} Dec 13 03:28:35 crc kubenswrapper[5070]: I1213 03:28:35.057589 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lsptj\" (UniqueName: \"kubernetes.io/projected/cae1b496-66ab-43e4-9a6a-95cb71b5fd49-kube-api-access-lsptj\") pod \"glance-db-create-hsxvf\" (UID: \"cae1b496-66ab-43e4-9a6a-95cb71b5fd49\") " pod="openstack/glance-db-create-hsxvf" Dec 13 03:28:35 crc kubenswrapper[5070]: I1213 03:28:35.159717 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lsptj\" (UniqueName: \"kubernetes.io/projected/cae1b496-66ab-43e4-9a6a-95cb71b5fd49-kube-api-access-lsptj\") pod \"glance-db-create-hsxvf\" (UID: \"cae1b496-66ab-43e4-9a6a-95cb71b5fd49\") " pod="openstack/glance-db-create-hsxvf" Dec 13 03:28:35 crc kubenswrapper[5070]: I1213 03:28:35.182068 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lsptj\" (UniqueName: \"kubernetes.io/projected/cae1b496-66ab-43e4-9a6a-95cb71b5fd49-kube-api-access-lsptj\") pod \"glance-db-create-hsxvf\" (UID: \"cae1b496-66ab-43e4-9a6a-95cb71b5fd49\") " pod="openstack/glance-db-create-hsxvf" Dec 13 03:28:35 crc kubenswrapper[5070]: I1213 03:28:35.276183 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-hsxvf" Dec 13 03:28:35 crc kubenswrapper[5070]: I1213 03:28:35.788996 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-hsxvf"] Dec 13 03:28:35 crc kubenswrapper[5070]: W1213 03:28:35.832790 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcae1b496_66ab_43e4_9a6a_95cb71b5fd49.slice/crio-5f056be2e1b696e6310fe5df47fb07563a2eb1b2854be2c7f5b271710573af62 WatchSource:0}: Error finding container 5f056be2e1b696e6310fe5df47fb07563a2eb1b2854be2c7f5b271710573af62: Status 404 returned error can't find the container with id 5f056be2e1b696e6310fe5df47fb07563a2eb1b2854be2c7f5b271710573af62 Dec 13 03:28:35 crc kubenswrapper[5070]: I1213 03:28:35.969385 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-hsxvf" event={"ID":"cae1b496-66ab-43e4-9a6a-95cb71b5fd49","Type":"ContainerStarted","Data":"5f056be2e1b696e6310fe5df47fb07563a2eb1b2854be2c7f5b271710573af62"} Dec 13 03:28:35 crc kubenswrapper[5070]: I1213 03:28:35.971606 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"35f8712b-8f90-45cb-a2c6-dbbcc7357542","Type":"ContainerStarted","Data":"4dd79d5e034402bf8d0b41b6f5d8fc81c362465ffd26c66760c427fb322a8141"} Dec 13 03:28:35 crc kubenswrapper[5070]: I1213 03:28:35.971631 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"35f8712b-8f90-45cb-a2c6-dbbcc7357542","Type":"ContainerStarted","Data":"24a0b35c2718c62c899ccd97a0709b4c54753f82ed8f332bd6df35a2d8c3ba6d"} Dec 13 03:28:35 crc kubenswrapper[5070]: I1213 03:28:35.971857 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-northd-0" Dec 13 03:28:36 crc kubenswrapper[5070]: I1213 03:28:36.004433 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-northd-0" podStartSLOduration=1.75894347 podStartE2EDuration="3.004413159s" podCreationTimestamp="2025-12-13 03:28:33 +0000 UTC" firstStartedPulling="2025-12-13 03:28:34.106726275 +0000 UTC m=+1006.342569821" lastFinishedPulling="2025-12-13 03:28:35.352195964 +0000 UTC m=+1007.588039510" observedRunningTime="2025-12-13 03:28:35.997965924 +0000 UTC m=+1008.233809480" watchObservedRunningTime="2025-12-13 03:28:36.004413159 +0000 UTC m=+1008.240256705" Dec 13 03:28:36 crc kubenswrapper[5070]: I1213 03:28:36.983500 5070 generic.go:334] "Generic (PLEG): container finished" podID="cae1b496-66ab-43e4-9a6a-95cb71b5fd49" containerID="8f2d54109003d5c0234f52e6a00aaa806946325be3d82cb61dc337f285d6dff6" exitCode=0 Dec 13 03:28:36 crc kubenswrapper[5070]: I1213 03:28:36.983882 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-hsxvf" event={"ID":"cae1b496-66ab-43e4-9a6a-95cb71b5fd49","Type":"ContainerDied","Data":"8f2d54109003d5c0234f52e6a00aaa806946325be3d82cb61dc337f285d6dff6"} Dec 13 03:28:38 crc kubenswrapper[5070]: I1213 03:28:38.515683 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-hsxvf" Dec 13 03:28:38 crc kubenswrapper[5070]: I1213 03:28:38.631432 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lsptj\" (UniqueName: \"kubernetes.io/projected/cae1b496-66ab-43e4-9a6a-95cb71b5fd49-kube-api-access-lsptj\") pod \"cae1b496-66ab-43e4-9a6a-95cb71b5fd49\" (UID: \"cae1b496-66ab-43e4-9a6a-95cb71b5fd49\") " Dec 13 03:28:38 crc kubenswrapper[5070]: I1213 03:28:38.638616 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cae1b496-66ab-43e4-9a6a-95cb71b5fd49-kube-api-access-lsptj" (OuterVolumeSpecName: "kube-api-access-lsptj") pod "cae1b496-66ab-43e4-9a6a-95cb71b5fd49" (UID: "cae1b496-66ab-43e4-9a6a-95cb71b5fd49"). InnerVolumeSpecName "kube-api-access-lsptj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:28:38 crc kubenswrapper[5070]: I1213 03:28:38.733714 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lsptj\" (UniqueName: \"kubernetes.io/projected/cae1b496-66ab-43e4-9a6a-95cb71b5fd49-kube-api-access-lsptj\") on node \"crc\" DevicePath \"\"" Dec 13 03:28:39 crc kubenswrapper[5070]: I1213 03:28:39.002992 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-hsxvf" event={"ID":"cae1b496-66ab-43e4-9a6a-95cb71b5fd49","Type":"ContainerDied","Data":"5f056be2e1b696e6310fe5df47fb07563a2eb1b2854be2c7f5b271710573af62"} Dec 13 03:28:39 crc kubenswrapper[5070]: I1213 03:28:39.003039 5070 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5f056be2e1b696e6310fe5df47fb07563a2eb1b2854be2c7f5b271710573af62" Dec 13 03:28:39 crc kubenswrapper[5070]: I1213 03:28:39.003054 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-hsxvf" Dec 13 03:28:39 crc kubenswrapper[5070]: I1213 03:28:39.196138 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-czmmk"] Dec 13 03:28:39 crc kubenswrapper[5070]: E1213 03:28:39.196673 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cae1b496-66ab-43e4-9a6a-95cb71b5fd49" containerName="mariadb-database-create" Dec 13 03:28:39 crc kubenswrapper[5070]: I1213 03:28:39.196701 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="cae1b496-66ab-43e4-9a6a-95cb71b5fd49" containerName="mariadb-database-create" Dec 13 03:28:39 crc kubenswrapper[5070]: I1213 03:28:39.198743 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="cae1b496-66ab-43e4-9a6a-95cb71b5fd49" containerName="mariadb-database-create" Dec 13 03:28:39 crc kubenswrapper[5070]: I1213 03:28:39.199568 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-create-czmmk" Dec 13 03:28:39 crc kubenswrapper[5070]: I1213 03:28:39.205723 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-czmmk"] Dec 13 03:28:39 crc kubenswrapper[5070]: I1213 03:28:39.349016 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-scgdn\" (UniqueName: \"kubernetes.io/projected/9352dffa-7117-42a7-b012-a78eee3dae3a-kube-api-access-scgdn\") pod \"keystone-db-create-czmmk\" (UID: \"9352dffa-7117-42a7-b012-a78eee3dae3a\") " pod="openstack/keystone-db-create-czmmk" Dec 13 03:28:39 crc kubenswrapper[5070]: I1213 03:28:39.450350 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-scgdn\" (UniqueName: \"kubernetes.io/projected/9352dffa-7117-42a7-b012-a78eee3dae3a-kube-api-access-scgdn\") pod \"keystone-db-create-czmmk\" (UID: \"9352dffa-7117-42a7-b012-a78eee3dae3a\") " pod="openstack/keystone-db-create-czmmk" Dec 13 03:28:39 crc kubenswrapper[5070]: I1213 03:28:39.469434 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-scgdn\" (UniqueName: \"kubernetes.io/projected/9352dffa-7117-42a7-b012-a78eee3dae3a-kube-api-access-scgdn\") pod \"keystone-db-create-czmmk\" (UID: \"9352dffa-7117-42a7-b012-a78eee3dae3a\") " pod="openstack/keystone-db-create-czmmk" Dec 13 03:28:39 crc kubenswrapper[5070]: I1213 03:28:39.513550 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-create-9shm5"] Dec 13 03:28:39 crc kubenswrapper[5070]: I1213 03:28:39.515421 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-9shm5" Dec 13 03:28:39 crc kubenswrapper[5070]: I1213 03:28:39.521296 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-czmmk" Dec 13 03:28:39 crc kubenswrapper[5070]: I1213 03:28:39.523050 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-9shm5"] Dec 13 03:28:39 crc kubenswrapper[5070]: I1213 03:28:39.657851 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dbjgg\" (UniqueName: \"kubernetes.io/projected/8e0d7597-7e74-4b63-a689-d545690e3715-kube-api-access-dbjgg\") pod \"placement-db-create-9shm5\" (UID: \"8e0d7597-7e74-4b63-a689-d545690e3715\") " pod="openstack/placement-db-create-9shm5" Dec 13 03:28:39 crc kubenswrapper[5070]: I1213 03:28:39.760053 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dbjgg\" (UniqueName: \"kubernetes.io/projected/8e0d7597-7e74-4b63-a689-d545690e3715-kube-api-access-dbjgg\") pod \"placement-db-create-9shm5\" (UID: \"8e0d7597-7e74-4b63-a689-d545690e3715\") " pod="openstack/placement-db-create-9shm5" Dec 13 03:28:39 crc kubenswrapper[5070]: I1213 03:28:39.782067 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dbjgg\" (UniqueName: \"kubernetes.io/projected/8e0d7597-7e74-4b63-a689-d545690e3715-kube-api-access-dbjgg\") pod \"placement-db-create-9shm5\" (UID: \"8e0d7597-7e74-4b63-a689-d545690e3715\") " pod="openstack/placement-db-create-9shm5" Dec 13 03:28:39 crc kubenswrapper[5070]: I1213 03:28:39.838532 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-create-9shm5" Dec 13 03:28:39 crc kubenswrapper[5070]: I1213 03:28:39.992670 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-czmmk"] Dec 13 03:28:40 crc kubenswrapper[5070]: I1213 03:28:40.012523 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-czmmk" event={"ID":"9352dffa-7117-42a7-b012-a78eee3dae3a","Type":"ContainerStarted","Data":"e26166a624273d89783d7c8eeeb58a2ae48548133ca7bed4df6fab7565553aff"} Dec 13 03:28:40 crc kubenswrapper[5070]: I1213 03:28:40.322003 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-9shm5"] Dec 13 03:28:40 crc kubenswrapper[5070]: W1213 03:28:40.324968 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8e0d7597_7e74_4b63_a689_d545690e3715.slice/crio-e97e8a1076b1402d47ef07fc2dbec7df96bdfaad23677ddd22088dfd5e3ecaf0 WatchSource:0}: Error finding container e97e8a1076b1402d47ef07fc2dbec7df96bdfaad23677ddd22088dfd5e3ecaf0: Status 404 returned error can't find the container with id e97e8a1076b1402d47ef07fc2dbec7df96bdfaad23677ddd22088dfd5e3ecaf0 Dec 13 03:28:41 crc kubenswrapper[5070]: I1213 03:28:41.028984 5070 generic.go:334] "Generic (PLEG): container finished" podID="9352dffa-7117-42a7-b012-a78eee3dae3a" containerID="8a9c65ec4c5124014f4017580275eb5b077ce044324f4ad9d4abb648b5403875" exitCode=0 Dec 13 03:28:41 crc kubenswrapper[5070]: I1213 03:28:41.029377 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-czmmk" event={"ID":"9352dffa-7117-42a7-b012-a78eee3dae3a","Type":"ContainerDied","Data":"8a9c65ec4c5124014f4017580275eb5b077ce044324f4ad9d4abb648b5403875"} Dec 13 03:28:41 crc kubenswrapper[5070]: I1213 03:28:41.032592 5070 generic.go:334] "Generic (PLEG): container finished" podID="8e0d7597-7e74-4b63-a689-d545690e3715" containerID="e0fe988d1c0caf283d5376d630dfaa9a0b90a36bff6746aa3bc3248496519874" exitCode=0 Dec 13 03:28:41 crc kubenswrapper[5070]: I1213 03:28:41.032662 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-9shm5" event={"ID":"8e0d7597-7e74-4b63-a689-d545690e3715","Type":"ContainerDied","Data":"e0fe988d1c0caf283d5376d630dfaa9a0b90a36bff6746aa3bc3248496519874"} Dec 13 03:28:41 crc kubenswrapper[5070]: I1213 03:28:41.032691 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-9shm5" event={"ID":"8e0d7597-7e74-4b63-a689-d545690e3715","Type":"ContainerStarted","Data":"e97e8a1076b1402d47ef07fc2dbec7df96bdfaad23677ddd22088dfd5e3ecaf0"} Dec 13 03:28:42 crc kubenswrapper[5070]: I1213 03:28:42.133700 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-8554648995-dtgg8" Dec 13 03:28:42 crc kubenswrapper[5070]: I1213 03:28:42.282949 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-9w6wn"] Dec 13 03:28:42 crc kubenswrapper[5070]: I1213 03:28:42.283302 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-57d769cc4f-9w6wn" podUID="847376ab-5cb3-46ed-8b13-f0a21f09c135" containerName="dnsmasq-dns" containerID="cri-o://81a44503ba3a048f1181e7351e805361a0c483253fbeaf87779edb72390d1019" gracePeriod=10 Dec 13 03:28:42 crc kubenswrapper[5070]: I1213 03:28:42.584750 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-create-czmmk" Dec 13 03:28:42 crc kubenswrapper[5070]: I1213 03:28:42.597557 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-9shm5" Dec 13 03:28:42 crc kubenswrapper[5070]: I1213 03:28:42.706984 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbjgg\" (UniqueName: \"kubernetes.io/projected/8e0d7597-7e74-4b63-a689-d545690e3715-kube-api-access-dbjgg\") pod \"8e0d7597-7e74-4b63-a689-d545690e3715\" (UID: \"8e0d7597-7e74-4b63-a689-d545690e3715\") " Dec 13 03:28:42 crc kubenswrapper[5070]: I1213 03:28:42.707240 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-scgdn\" (UniqueName: \"kubernetes.io/projected/9352dffa-7117-42a7-b012-a78eee3dae3a-kube-api-access-scgdn\") pod \"9352dffa-7117-42a7-b012-a78eee3dae3a\" (UID: \"9352dffa-7117-42a7-b012-a78eee3dae3a\") " Dec 13 03:28:42 crc kubenswrapper[5070]: I1213 03:28:42.713411 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9352dffa-7117-42a7-b012-a78eee3dae3a-kube-api-access-scgdn" (OuterVolumeSpecName: "kube-api-access-scgdn") pod "9352dffa-7117-42a7-b012-a78eee3dae3a" (UID: "9352dffa-7117-42a7-b012-a78eee3dae3a"). InnerVolumeSpecName "kube-api-access-scgdn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:28:42 crc kubenswrapper[5070]: I1213 03:28:42.715293 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8e0d7597-7e74-4b63-a689-d545690e3715-kube-api-access-dbjgg" (OuterVolumeSpecName: "kube-api-access-dbjgg") pod "8e0d7597-7e74-4b63-a689-d545690e3715" (UID: "8e0d7597-7e74-4b63-a689-d545690e3715"). InnerVolumeSpecName "kube-api-access-dbjgg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:28:42 crc kubenswrapper[5070]: I1213 03:28:42.808481 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbjgg\" (UniqueName: \"kubernetes.io/projected/8e0d7597-7e74-4b63-a689-d545690e3715-kube-api-access-dbjgg\") on node \"crc\" DevicePath \"\"" Dec 13 03:28:42 crc kubenswrapper[5070]: I1213 03:28:42.808523 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-scgdn\" (UniqueName: \"kubernetes.io/projected/9352dffa-7117-42a7-b012-a78eee3dae3a-kube-api-access-scgdn\") on node \"crc\" DevicePath \"\"" Dec 13 03:28:43 crc kubenswrapper[5070]: I1213 03:28:43.054867 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-9shm5" event={"ID":"8e0d7597-7e74-4b63-a689-d545690e3715","Type":"ContainerDied","Data":"e97e8a1076b1402d47ef07fc2dbec7df96bdfaad23677ddd22088dfd5e3ecaf0"} Dec 13 03:28:43 crc kubenswrapper[5070]: I1213 03:28:43.054906 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-create-9shm5" Dec 13 03:28:43 crc kubenswrapper[5070]: I1213 03:28:43.054925 5070 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e97e8a1076b1402d47ef07fc2dbec7df96bdfaad23677ddd22088dfd5e3ecaf0" Dec 13 03:28:43 crc kubenswrapper[5070]: I1213 03:28:43.056895 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-czmmk" event={"ID":"9352dffa-7117-42a7-b012-a78eee3dae3a","Type":"ContainerDied","Data":"e26166a624273d89783d7c8eeeb58a2ae48548133ca7bed4df6fab7565553aff"} Dec 13 03:28:43 crc kubenswrapper[5070]: I1213 03:28:43.056919 5070 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e26166a624273d89783d7c8eeeb58a2ae48548133ca7bed4df6fab7565553aff" Dec 13 03:28:43 crc kubenswrapper[5070]: I1213 03:28:43.056964 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-czmmk" Dec 13 03:28:43 crc kubenswrapper[5070]: I1213 03:28:43.784920 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-9w6wn" Dec 13 03:28:43 crc kubenswrapper[5070]: I1213 03:28:43.924757 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/847376ab-5cb3-46ed-8b13-f0a21f09c135-dns-svc\") pod \"847376ab-5cb3-46ed-8b13-f0a21f09c135\" (UID: \"847376ab-5cb3-46ed-8b13-f0a21f09c135\") " Dec 13 03:28:43 crc kubenswrapper[5070]: I1213 03:28:43.924818 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vhwrf\" (UniqueName: \"kubernetes.io/projected/847376ab-5cb3-46ed-8b13-f0a21f09c135-kube-api-access-vhwrf\") pod \"847376ab-5cb3-46ed-8b13-f0a21f09c135\" (UID: \"847376ab-5cb3-46ed-8b13-f0a21f09c135\") " Dec 13 03:28:43 crc kubenswrapper[5070]: I1213 03:28:43.924958 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/847376ab-5cb3-46ed-8b13-f0a21f09c135-config\") pod \"847376ab-5cb3-46ed-8b13-f0a21f09c135\" (UID: \"847376ab-5cb3-46ed-8b13-f0a21f09c135\") " Dec 13 03:28:43 crc kubenswrapper[5070]: I1213 03:28:43.929168 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/847376ab-5cb3-46ed-8b13-f0a21f09c135-kube-api-access-vhwrf" (OuterVolumeSpecName: "kube-api-access-vhwrf") pod "847376ab-5cb3-46ed-8b13-f0a21f09c135" (UID: "847376ab-5cb3-46ed-8b13-f0a21f09c135"). InnerVolumeSpecName "kube-api-access-vhwrf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:28:43 crc kubenswrapper[5070]: I1213 03:28:43.962692 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/847376ab-5cb3-46ed-8b13-f0a21f09c135-config" (OuterVolumeSpecName: "config") pod "847376ab-5cb3-46ed-8b13-f0a21f09c135" (UID: "847376ab-5cb3-46ed-8b13-f0a21f09c135"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:28:43 crc kubenswrapper[5070]: I1213 03:28:43.963370 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/847376ab-5cb3-46ed-8b13-f0a21f09c135-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "847376ab-5cb3-46ed-8b13-f0a21f09c135" (UID: "847376ab-5cb3-46ed-8b13-f0a21f09c135"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:28:44 crc kubenswrapper[5070]: I1213 03:28:44.027240 5070 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/847376ab-5cb3-46ed-8b13-f0a21f09c135-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 13 03:28:44 crc kubenswrapper[5070]: I1213 03:28:44.027292 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vhwrf\" (UniqueName: \"kubernetes.io/projected/847376ab-5cb3-46ed-8b13-f0a21f09c135-kube-api-access-vhwrf\") on node \"crc\" DevicePath \"\"" Dec 13 03:28:44 crc kubenswrapper[5070]: I1213 03:28:44.027306 5070 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/847376ab-5cb3-46ed-8b13-f0a21f09c135-config\") on node \"crc\" DevicePath \"\"" Dec 13 03:28:44 crc kubenswrapper[5070]: I1213 03:28:44.064663 5070 generic.go:334] "Generic (PLEG): container finished" podID="847376ab-5cb3-46ed-8b13-f0a21f09c135" containerID="81a44503ba3a048f1181e7351e805361a0c483253fbeaf87779edb72390d1019" exitCode=0 Dec 13 03:28:44 crc kubenswrapper[5070]: I1213 03:28:44.064706 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-9w6wn" event={"ID":"847376ab-5cb3-46ed-8b13-f0a21f09c135","Type":"ContainerDied","Data":"81a44503ba3a048f1181e7351e805361a0c483253fbeaf87779edb72390d1019"} Dec 13 03:28:44 crc kubenswrapper[5070]: I1213 03:28:44.064736 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-9w6wn" event={"ID":"847376ab-5cb3-46ed-8b13-f0a21f09c135","Type":"ContainerDied","Data":"bbdf6ce710ce34b56a2f47646177c2340fb8ed08e7e0ab3c2d3646e35f0f3834"} Dec 13 03:28:44 crc kubenswrapper[5070]: I1213 03:28:44.064759 5070 scope.go:117] "RemoveContainer" containerID="81a44503ba3a048f1181e7351e805361a0c483253fbeaf87779edb72390d1019" Dec 13 03:28:44 crc kubenswrapper[5070]: I1213 03:28:44.064763 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-9w6wn" Dec 13 03:28:44 crc kubenswrapper[5070]: I1213 03:28:44.079921 5070 scope.go:117] "RemoveContainer" containerID="bf35365a956f25d5071e0c7f88c93693b82c466dc535610ec6a0349ea382dbe5" Dec 13 03:28:44 crc kubenswrapper[5070]: I1213 03:28:44.099667 5070 scope.go:117] "RemoveContainer" containerID="81a44503ba3a048f1181e7351e805361a0c483253fbeaf87779edb72390d1019" Dec 13 03:28:44 crc kubenswrapper[5070]: E1213 03:28:44.100167 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"81a44503ba3a048f1181e7351e805361a0c483253fbeaf87779edb72390d1019\": container with ID starting with 81a44503ba3a048f1181e7351e805361a0c483253fbeaf87779edb72390d1019 not found: ID does not exist" containerID="81a44503ba3a048f1181e7351e805361a0c483253fbeaf87779edb72390d1019" Dec 13 03:28:44 crc kubenswrapper[5070]: I1213 03:28:44.100242 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"81a44503ba3a048f1181e7351e805361a0c483253fbeaf87779edb72390d1019"} err="failed to get container status \"81a44503ba3a048f1181e7351e805361a0c483253fbeaf87779edb72390d1019\": rpc error: code = NotFound desc = could not find container \"81a44503ba3a048f1181e7351e805361a0c483253fbeaf87779edb72390d1019\": container with ID starting with 81a44503ba3a048f1181e7351e805361a0c483253fbeaf87779edb72390d1019 not found: ID does not exist" Dec 13 03:28:44 crc kubenswrapper[5070]: I1213 03:28:44.100291 5070 scope.go:117] "RemoveContainer" containerID="bf35365a956f25d5071e0c7f88c93693b82c466dc535610ec6a0349ea382dbe5" Dec 13 03:28:44 crc kubenswrapper[5070]: I1213 03:28:44.100459 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-9w6wn"] Dec 13 03:28:44 crc kubenswrapper[5070]: E1213 03:28:44.100712 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bf35365a956f25d5071e0c7f88c93693b82c466dc535610ec6a0349ea382dbe5\": container with ID starting with bf35365a956f25d5071e0c7f88c93693b82c466dc535610ec6a0349ea382dbe5 not found: ID does not exist" containerID="bf35365a956f25d5071e0c7f88c93693b82c466dc535610ec6a0349ea382dbe5" Dec 13 03:28:44 crc kubenswrapper[5070]: I1213 03:28:44.100743 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bf35365a956f25d5071e0c7f88c93693b82c466dc535610ec6a0349ea382dbe5"} err="failed to get container status \"bf35365a956f25d5071e0c7f88c93693b82c466dc535610ec6a0349ea382dbe5\": rpc error: code = NotFound desc = could not find container \"bf35365a956f25d5071e0c7f88c93693b82c466dc535610ec6a0349ea382dbe5\": container with ID starting with bf35365a956f25d5071e0c7f88c93693b82c466dc535610ec6a0349ea382dbe5 not found: ID does not exist" Dec 13 03:28:44 crc kubenswrapper[5070]: I1213 03:28:44.108360 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-9w6wn"] Dec 13 03:28:44 crc kubenswrapper[5070]: I1213 03:28:44.182351 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="847376ab-5cb3-46ed-8b13-f0a21f09c135" path="/var/lib/kubelet/pods/847376ab-5cb3-46ed-8b13-f0a21f09c135/volumes" Dec 13 03:28:44 crc kubenswrapper[5070]: I1213 03:28:44.949740 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-9dbc-account-create-9dwbb"] Dec 13 03:28:44 crc kubenswrapper[5070]: E1213 03:28:44.950298 5070 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="9352dffa-7117-42a7-b012-a78eee3dae3a" containerName="mariadb-database-create" Dec 13 03:28:44 crc kubenswrapper[5070]: I1213 03:28:44.950310 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="9352dffa-7117-42a7-b012-a78eee3dae3a" containerName="mariadb-database-create" Dec 13 03:28:44 crc kubenswrapper[5070]: E1213 03:28:44.950323 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="847376ab-5cb3-46ed-8b13-f0a21f09c135" containerName="init" Dec 13 03:28:44 crc kubenswrapper[5070]: I1213 03:28:44.950330 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="847376ab-5cb3-46ed-8b13-f0a21f09c135" containerName="init" Dec 13 03:28:44 crc kubenswrapper[5070]: E1213 03:28:44.950339 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8e0d7597-7e74-4b63-a689-d545690e3715" containerName="mariadb-database-create" Dec 13 03:28:44 crc kubenswrapper[5070]: I1213 03:28:44.950345 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="8e0d7597-7e74-4b63-a689-d545690e3715" containerName="mariadb-database-create" Dec 13 03:28:44 crc kubenswrapper[5070]: E1213 03:28:44.950363 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="847376ab-5cb3-46ed-8b13-f0a21f09c135" containerName="dnsmasq-dns" Dec 13 03:28:44 crc kubenswrapper[5070]: I1213 03:28:44.950369 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="847376ab-5cb3-46ed-8b13-f0a21f09c135" containerName="dnsmasq-dns" Dec 13 03:28:44 crc kubenswrapper[5070]: I1213 03:28:44.950535 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="8e0d7597-7e74-4b63-a689-d545690e3715" containerName="mariadb-database-create" Dec 13 03:28:44 crc kubenswrapper[5070]: I1213 03:28:44.950549 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="9352dffa-7117-42a7-b012-a78eee3dae3a" containerName="mariadb-database-create" Dec 13 03:28:44 crc kubenswrapper[5070]: I1213 03:28:44.950558 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="847376ab-5cb3-46ed-8b13-f0a21f09c135" containerName="dnsmasq-dns" Dec 13 03:28:44 crc kubenswrapper[5070]: I1213 03:28:44.951035 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-9dbc-account-create-9dwbb" Dec 13 03:28:44 crc kubenswrapper[5070]: I1213 03:28:44.953316 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-db-secret" Dec 13 03:28:44 crc kubenswrapper[5070]: I1213 03:28:44.963375 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-9dbc-account-create-9dwbb"] Dec 13 03:28:45 crc kubenswrapper[5070]: I1213 03:28:45.041312 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xc5fq\" (UniqueName: \"kubernetes.io/projected/d51c8da0-0e5a-42d2-9a28-4e4ba8c534d6-kube-api-access-xc5fq\") pod \"glance-9dbc-account-create-9dwbb\" (UID: \"d51c8da0-0e5a-42d2-9a28-4e4ba8c534d6\") " pod="openstack/glance-9dbc-account-create-9dwbb" Dec 13 03:28:45 crc kubenswrapper[5070]: I1213 03:28:45.142415 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xc5fq\" (UniqueName: \"kubernetes.io/projected/d51c8da0-0e5a-42d2-9a28-4e4ba8c534d6-kube-api-access-xc5fq\") pod \"glance-9dbc-account-create-9dwbb\" (UID: \"d51c8da0-0e5a-42d2-9a28-4e4ba8c534d6\") " pod="openstack/glance-9dbc-account-create-9dwbb" Dec 13 03:28:45 crc kubenswrapper[5070]: I1213 03:28:45.166170 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xc5fq\" (UniqueName: \"kubernetes.io/projected/d51c8da0-0e5a-42d2-9a28-4e4ba8c534d6-kube-api-access-xc5fq\") pod \"glance-9dbc-account-create-9dwbb\" (UID: \"d51c8da0-0e5a-42d2-9a28-4e4ba8c534d6\") " pod="openstack/glance-9dbc-account-create-9dwbb" Dec 13 03:28:45 crc kubenswrapper[5070]: I1213 03:28:45.265632 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-9dbc-account-create-9dwbb" Dec 13 03:28:45 crc kubenswrapper[5070]: I1213 03:28:45.926888 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-9dbc-account-create-9dwbb"] Dec 13 03:28:45 crc kubenswrapper[5070]: W1213 03:28:45.936132 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd51c8da0_0e5a_42d2_9a28_4e4ba8c534d6.slice/crio-982b1da5ef06f5ccf235b95c5edefaf963d51a5f49c755ab118a45a76663450b WatchSource:0}: Error finding container 982b1da5ef06f5ccf235b95c5edefaf963d51a5f49c755ab118a45a76663450b: Status 404 returned error can't find the container with id 982b1da5ef06f5ccf235b95c5edefaf963d51a5f49c755ab118a45a76663450b Dec 13 03:28:46 crc kubenswrapper[5070]: I1213 03:28:46.084539 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-9dbc-account-create-9dwbb" event={"ID":"d51c8da0-0e5a-42d2-9a28-4e4ba8c534d6","Type":"ContainerStarted","Data":"982b1da5ef06f5ccf235b95c5edefaf963d51a5f49c755ab118a45a76663450b"} Dec 13 03:28:47 crc kubenswrapper[5070]: I1213 03:28:47.097634 5070 generic.go:334] "Generic (PLEG): container finished" podID="d51c8da0-0e5a-42d2-9a28-4e4ba8c534d6" containerID="23812a8e52b8e978b001ee1bbff5c9a499552de5b5ef7b86f3800092d9125a28" exitCode=0 Dec 13 03:28:47 crc kubenswrapper[5070]: I1213 03:28:47.097847 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-9dbc-account-create-9dwbb" event={"ID":"d51c8da0-0e5a-42d2-9a28-4e4ba8c534d6","Type":"ContainerDied","Data":"23812a8e52b8e978b001ee1bbff5c9a499552de5b5ef7b86f3800092d9125a28"} Dec 13 03:28:48 crc kubenswrapper[5070]: I1213 03:28:48.504898 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-9dbc-account-create-9dwbb" Dec 13 03:28:48 crc kubenswrapper[5070]: I1213 03:28:48.629845 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xc5fq\" (UniqueName: \"kubernetes.io/projected/d51c8da0-0e5a-42d2-9a28-4e4ba8c534d6-kube-api-access-xc5fq\") pod \"d51c8da0-0e5a-42d2-9a28-4e4ba8c534d6\" (UID: \"d51c8da0-0e5a-42d2-9a28-4e4ba8c534d6\") " Dec 13 03:28:48 crc kubenswrapper[5070]: I1213 03:28:48.634759 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d51c8da0-0e5a-42d2-9a28-4e4ba8c534d6-kube-api-access-xc5fq" (OuterVolumeSpecName: "kube-api-access-xc5fq") pod "d51c8da0-0e5a-42d2-9a28-4e4ba8c534d6" (UID: "d51c8da0-0e5a-42d2-9a28-4e4ba8c534d6"). InnerVolumeSpecName "kube-api-access-xc5fq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:28:48 crc kubenswrapper[5070]: I1213 03:28:48.731990 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xc5fq\" (UniqueName: \"kubernetes.io/projected/d51c8da0-0e5a-42d2-9a28-4e4ba8c534d6-kube-api-access-xc5fq\") on node \"crc\" DevicePath \"\"" Dec 13 03:28:48 crc kubenswrapper[5070]: I1213 03:28:48.753214 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-northd-0" Dec 13 03:28:49 crc kubenswrapper[5070]: I1213 03:28:49.115389 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-9dbc-account-create-9dwbb" event={"ID":"d51c8da0-0e5a-42d2-9a28-4e4ba8c534d6","Type":"ContainerDied","Data":"982b1da5ef06f5ccf235b95c5edefaf963d51a5f49c755ab118a45a76663450b"} Dec 13 03:28:49 crc kubenswrapper[5070]: I1213 03:28:49.115713 5070 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="982b1da5ef06f5ccf235b95c5edefaf963d51a5f49c755ab118a45a76663450b" Dec 13 03:28:49 crc kubenswrapper[5070]: I1213 03:28:49.115432 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-9dbc-account-create-9dwbb" Dec 13 03:28:49 crc kubenswrapper[5070]: I1213 03:28:49.346368 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-54d5-account-create-nh9pv"] Dec 13 03:28:49 crc kubenswrapper[5070]: E1213 03:28:49.346870 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d51c8da0-0e5a-42d2-9a28-4e4ba8c534d6" containerName="mariadb-account-create" Dec 13 03:28:49 crc kubenswrapper[5070]: I1213 03:28:49.346892 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="d51c8da0-0e5a-42d2-9a28-4e4ba8c534d6" containerName="mariadb-account-create" Dec 13 03:28:49 crc kubenswrapper[5070]: I1213 03:28:49.347108 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="d51c8da0-0e5a-42d2-9a28-4e4ba8c534d6" containerName="mariadb-account-create" Dec 13 03:28:49 crc kubenswrapper[5070]: I1213 03:28:49.347918 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-54d5-account-create-nh9pv" Dec 13 03:28:49 crc kubenswrapper[5070]: I1213 03:28:49.350332 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret" Dec 13 03:28:49 crc kubenswrapper[5070]: I1213 03:28:49.361694 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-54d5-account-create-nh9pv"] Dec 13 03:28:49 crc kubenswrapper[5070]: I1213 03:28:49.545209 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kj72d\" (UniqueName: \"kubernetes.io/projected/cd03ee19-a6d8-4999-8b43-d31b4d159a8d-kube-api-access-kj72d\") pod \"keystone-54d5-account-create-nh9pv\" (UID: \"cd03ee19-a6d8-4999-8b43-d31b4d159a8d\") " pod="openstack/keystone-54d5-account-create-nh9pv" Dec 13 03:28:49 crc kubenswrapper[5070]: I1213 03:28:49.646764 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kj72d\" (UniqueName: \"kubernetes.io/projected/cd03ee19-a6d8-4999-8b43-d31b4d159a8d-kube-api-access-kj72d\") pod \"keystone-54d5-account-create-nh9pv\" (UID: \"cd03ee19-a6d8-4999-8b43-d31b4d159a8d\") " pod="openstack/keystone-54d5-account-create-nh9pv" Dec 13 03:28:49 crc kubenswrapper[5070]: I1213 03:28:49.647932 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-70a4-account-create-xgzss"] Dec 13 03:28:49 crc kubenswrapper[5070]: I1213 03:28:49.648999 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-70a4-account-create-xgzss" Dec 13 03:28:49 crc kubenswrapper[5070]: I1213 03:28:49.650691 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret" Dec 13 03:28:49 crc kubenswrapper[5070]: I1213 03:28:49.659159 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-70a4-account-create-xgzss"] Dec 13 03:28:49 crc kubenswrapper[5070]: I1213 03:28:49.672669 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kj72d\" (UniqueName: \"kubernetes.io/projected/cd03ee19-a6d8-4999-8b43-d31b4d159a8d-kube-api-access-kj72d\") pod \"keystone-54d5-account-create-nh9pv\" (UID: \"cd03ee19-a6d8-4999-8b43-d31b4d159a8d\") " pod="openstack/keystone-54d5-account-create-nh9pv" Dec 13 03:28:49 crc kubenswrapper[5070]: I1213 03:28:49.850363 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gkq6r\" (UniqueName: \"kubernetes.io/projected/578e1316-3a47-4516-89e8-59f936a22495-kube-api-access-gkq6r\") pod \"placement-70a4-account-create-xgzss\" (UID: \"578e1316-3a47-4516-89e8-59f936a22495\") " pod="openstack/placement-70a4-account-create-xgzss" Dec 13 03:28:49 crc kubenswrapper[5070]: I1213 03:28:49.953572 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gkq6r\" (UniqueName: \"kubernetes.io/projected/578e1316-3a47-4516-89e8-59f936a22495-kube-api-access-gkq6r\") pod \"placement-70a4-account-create-xgzss\" (UID: \"578e1316-3a47-4516-89e8-59f936a22495\") " pod="openstack/placement-70a4-account-create-xgzss" Dec 13 03:28:49 crc kubenswrapper[5070]: I1213 03:28:49.967113 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-54d5-account-create-nh9pv" Dec 13 03:28:50 crc kubenswrapper[5070]: I1213 03:28:50.010564 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gkq6r\" (UniqueName: \"kubernetes.io/projected/578e1316-3a47-4516-89e8-59f936a22495-kube-api-access-gkq6r\") pod \"placement-70a4-account-create-xgzss\" (UID: \"578e1316-3a47-4516-89e8-59f936a22495\") " pod="openstack/placement-70a4-account-create-xgzss" Dec 13 03:28:50 crc kubenswrapper[5070]: I1213 03:28:50.061671 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-sync-vv4wt"] Dec 13 03:28:50 crc kubenswrapper[5070]: I1213 03:28:50.063219 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-vv4wt" Dec 13 03:28:50 crc kubenswrapper[5070]: I1213 03:28:50.074842 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-7z77k" Dec 13 03:28:50 crc kubenswrapper[5070]: I1213 03:28:50.075179 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-config-data" Dec 13 03:28:50 crc kubenswrapper[5070]: I1213 03:28:50.086161 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-vv4wt"] Dec 13 03:28:50 crc kubenswrapper[5070]: I1213 03:28:50.158461 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/682caa0d-3a80-41aa-a899-8613f8454481-config-data\") pod \"glance-db-sync-vv4wt\" (UID: \"682caa0d-3a80-41aa-a899-8613f8454481\") " pod="openstack/glance-db-sync-vv4wt" Dec 13 03:28:50 crc kubenswrapper[5070]: I1213 03:28:50.158571 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-djrdj\" (UniqueName: \"kubernetes.io/projected/682caa0d-3a80-41aa-a899-8613f8454481-kube-api-access-djrdj\") pod \"glance-db-sync-vv4wt\" (UID: \"682caa0d-3a80-41aa-a899-8613f8454481\") " pod="openstack/glance-db-sync-vv4wt" Dec 13 03:28:50 crc kubenswrapper[5070]: I1213 03:28:50.158631 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/682caa0d-3a80-41aa-a899-8613f8454481-db-sync-config-data\") pod \"glance-db-sync-vv4wt\" (UID: \"682caa0d-3a80-41aa-a899-8613f8454481\") " pod="openstack/glance-db-sync-vv4wt" Dec 13 03:28:50 crc kubenswrapper[5070]: I1213 03:28:50.158753 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/682caa0d-3a80-41aa-a899-8613f8454481-combined-ca-bundle\") pod \"glance-db-sync-vv4wt\" (UID: \"682caa0d-3a80-41aa-a899-8613f8454481\") " pod="openstack/glance-db-sync-vv4wt" Dec 13 03:28:50 crc kubenswrapper[5070]: I1213 03:28:50.259382 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/682caa0d-3a80-41aa-a899-8613f8454481-config-data\") pod \"glance-db-sync-vv4wt\" (UID: \"682caa0d-3a80-41aa-a899-8613f8454481\") " pod="openstack/glance-db-sync-vv4wt" Dec 13 03:28:50 crc kubenswrapper[5070]: I1213 03:28:50.259939 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-djrdj\" (UniqueName: \"kubernetes.io/projected/682caa0d-3a80-41aa-a899-8613f8454481-kube-api-access-djrdj\") pod 
\"glance-db-sync-vv4wt\" (UID: \"682caa0d-3a80-41aa-a899-8613f8454481\") " pod="openstack/glance-db-sync-vv4wt" Dec 13 03:28:50 crc kubenswrapper[5070]: I1213 03:28:50.259993 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/682caa0d-3a80-41aa-a899-8613f8454481-db-sync-config-data\") pod \"glance-db-sync-vv4wt\" (UID: \"682caa0d-3a80-41aa-a899-8613f8454481\") " pod="openstack/glance-db-sync-vv4wt" Dec 13 03:28:50 crc kubenswrapper[5070]: I1213 03:28:50.260043 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/682caa0d-3a80-41aa-a899-8613f8454481-combined-ca-bundle\") pod \"glance-db-sync-vv4wt\" (UID: \"682caa0d-3a80-41aa-a899-8613f8454481\") " pod="openstack/glance-db-sync-vv4wt" Dec 13 03:28:50 crc kubenswrapper[5070]: I1213 03:28:50.264089 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/682caa0d-3a80-41aa-a899-8613f8454481-config-data\") pod \"glance-db-sync-vv4wt\" (UID: \"682caa0d-3a80-41aa-a899-8613f8454481\") " pod="openstack/glance-db-sync-vv4wt" Dec 13 03:28:50 crc kubenswrapper[5070]: I1213 03:28:50.264596 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/682caa0d-3a80-41aa-a899-8613f8454481-db-sync-config-data\") pod \"glance-db-sync-vv4wt\" (UID: \"682caa0d-3a80-41aa-a899-8613f8454481\") " pod="openstack/glance-db-sync-vv4wt" Dec 13 03:28:50 crc kubenswrapper[5070]: I1213 03:28:50.273953 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-70a4-account-create-xgzss" Dec 13 03:28:50 crc kubenswrapper[5070]: I1213 03:28:50.276089 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/682caa0d-3a80-41aa-a899-8613f8454481-combined-ca-bundle\") pod \"glance-db-sync-vv4wt\" (UID: \"682caa0d-3a80-41aa-a899-8613f8454481\") " pod="openstack/glance-db-sync-vv4wt" Dec 13 03:28:50 crc kubenswrapper[5070]: I1213 03:28:50.282984 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-djrdj\" (UniqueName: \"kubernetes.io/projected/682caa0d-3a80-41aa-a899-8613f8454481-kube-api-access-djrdj\") pod \"glance-db-sync-vv4wt\" (UID: \"682caa0d-3a80-41aa-a899-8613f8454481\") " pod="openstack/glance-db-sync-vv4wt" Dec 13 03:28:50 crc kubenswrapper[5070]: I1213 03:28:50.420744 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-vv4wt" Dec 13 03:28:50 crc kubenswrapper[5070]: I1213 03:28:50.523495 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-54d5-account-create-nh9pv"] Dec 13 03:28:50 crc kubenswrapper[5070]: W1213 03:28:50.584411 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcd03ee19_a6d8_4999_8b43_d31b4d159a8d.slice/crio-e84e478f0f6f0984035b2c8d7dcd630f458cadf26f5d7a1d9eb1b9cff7d49f61 WatchSource:0}: Error finding container e84e478f0f6f0984035b2c8d7dcd630f458cadf26f5d7a1d9eb1b9cff7d49f61: Status 404 returned error can't find the container with id e84e478f0f6f0984035b2c8d7dcd630f458cadf26f5d7a1d9eb1b9cff7d49f61 Dec 13 03:28:50 crc kubenswrapper[5070]: I1213 03:28:50.781593 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-70a4-account-create-xgzss"] Dec 13 03:28:50 crc kubenswrapper[5070]: I1213 03:28:50.898993 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-vv4wt"] Dec 13 03:28:51 crc kubenswrapper[5070]: I1213 03:28:51.146410 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-70a4-account-create-xgzss" event={"ID":"578e1316-3a47-4516-89e8-59f936a22495","Type":"ContainerStarted","Data":"e89afb4f6fb9b76fa128e4afaa32a8f8a812c4c6eaed22d41cf660d9acaa0edd"} Dec 13 03:28:51 crc kubenswrapper[5070]: I1213 03:28:51.148473 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-54d5-account-create-nh9pv" event={"ID":"cd03ee19-a6d8-4999-8b43-d31b4d159a8d","Type":"ContainerStarted","Data":"e84e478f0f6f0984035b2c8d7dcd630f458cadf26f5d7a1d9eb1b9cff7d49f61"} Dec 13 03:28:51 crc kubenswrapper[5070]: I1213 03:28:51.149987 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-vv4wt" event={"ID":"682caa0d-3a80-41aa-a899-8613f8454481","Type":"ContainerStarted","Data":"035a85ca2cb3e0a4c7a046370d04a08a93832d84d421f66cb3a5c8ae8ee1ef4e"} Dec 13 03:28:51 crc kubenswrapper[5070]: I1213 03:28:51.942554 5070 patch_prober.go:28] interesting pod/machine-config-daemon-9l4rb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 13 03:28:51 crc kubenswrapper[5070]: I1213 03:28:51.942853 5070 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 13 03:28:52 crc kubenswrapper[5070]: I1213 03:28:52.159908 5070 generic.go:334] "Generic (PLEG): container finished" podID="578e1316-3a47-4516-89e8-59f936a22495" containerID="3517bde0e941ace8c1b85b02564c0bed0194614f4b28d4c3e2f1dde8afe4f9ab" exitCode=0 Dec 13 03:28:52 crc kubenswrapper[5070]: I1213 03:28:52.159986 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-70a4-account-create-xgzss" event={"ID":"578e1316-3a47-4516-89e8-59f936a22495","Type":"ContainerDied","Data":"3517bde0e941ace8c1b85b02564c0bed0194614f4b28d4c3e2f1dde8afe4f9ab"} Dec 13 03:28:52 crc kubenswrapper[5070]: I1213 03:28:52.161863 5070 generic.go:334] "Generic (PLEG): container finished" podID="cd03ee19-a6d8-4999-8b43-d31b4d159a8d" 
containerID="7e6e604e38bcde0bd3acce10218073e9f4f08f51f8e71dffa76e6cf73de8f1ec" exitCode=0 Dec 13 03:28:52 crc kubenswrapper[5070]: I1213 03:28:52.161894 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-54d5-account-create-nh9pv" event={"ID":"cd03ee19-a6d8-4999-8b43-d31b4d159a8d","Type":"ContainerDied","Data":"7e6e604e38bcde0bd3acce10218073e9f4f08f51f8e71dffa76e6cf73de8f1ec"} Dec 13 03:28:53 crc kubenswrapper[5070]: I1213 03:28:53.677632 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-54d5-account-create-nh9pv" Dec 13 03:28:53 crc kubenswrapper[5070]: I1213 03:28:53.681261 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-70a4-account-create-xgzss" Dec 13 03:28:53 crc kubenswrapper[5070]: I1213 03:28:53.707767 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gkq6r\" (UniqueName: \"kubernetes.io/projected/578e1316-3a47-4516-89e8-59f936a22495-kube-api-access-gkq6r\") pod \"578e1316-3a47-4516-89e8-59f936a22495\" (UID: \"578e1316-3a47-4516-89e8-59f936a22495\") " Dec 13 03:28:53 crc kubenswrapper[5070]: I1213 03:28:53.707813 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kj72d\" (UniqueName: \"kubernetes.io/projected/cd03ee19-a6d8-4999-8b43-d31b4d159a8d-kube-api-access-kj72d\") pod \"cd03ee19-a6d8-4999-8b43-d31b4d159a8d\" (UID: \"cd03ee19-a6d8-4999-8b43-d31b4d159a8d\") " Dec 13 03:28:53 crc kubenswrapper[5070]: I1213 03:28:53.715244 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd03ee19-a6d8-4999-8b43-d31b4d159a8d-kube-api-access-kj72d" (OuterVolumeSpecName: "kube-api-access-kj72d") pod "cd03ee19-a6d8-4999-8b43-d31b4d159a8d" (UID: "cd03ee19-a6d8-4999-8b43-d31b4d159a8d"). InnerVolumeSpecName "kube-api-access-kj72d". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:28:53 crc kubenswrapper[5070]: I1213 03:28:53.716639 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/578e1316-3a47-4516-89e8-59f936a22495-kube-api-access-gkq6r" (OuterVolumeSpecName: "kube-api-access-gkq6r") pod "578e1316-3a47-4516-89e8-59f936a22495" (UID: "578e1316-3a47-4516-89e8-59f936a22495"). InnerVolumeSpecName "kube-api-access-gkq6r". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:28:53 crc kubenswrapper[5070]: I1213 03:28:53.809130 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gkq6r\" (UniqueName: \"kubernetes.io/projected/578e1316-3a47-4516-89e8-59f936a22495-kube-api-access-gkq6r\") on node \"crc\" DevicePath \"\"" Dec 13 03:28:53 crc kubenswrapper[5070]: I1213 03:28:53.809162 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kj72d\" (UniqueName: \"kubernetes.io/projected/cd03ee19-a6d8-4999-8b43-d31b4d159a8d-kube-api-access-kj72d\") on node \"crc\" DevicePath \"\"" Dec 13 03:28:54 crc kubenswrapper[5070]: I1213 03:28:54.176712 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-70a4-account-create-xgzss" Dec 13 03:28:54 crc kubenswrapper[5070]: I1213 03:28:54.178185 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-54d5-account-create-nh9pv" Dec 13 03:28:54 crc kubenswrapper[5070]: I1213 03:28:54.179301 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-70a4-account-create-xgzss" event={"ID":"578e1316-3a47-4516-89e8-59f936a22495","Type":"ContainerDied","Data":"e89afb4f6fb9b76fa128e4afaa32a8f8a812c4c6eaed22d41cf660d9acaa0edd"} Dec 13 03:28:54 crc kubenswrapper[5070]: I1213 03:28:54.179339 5070 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e89afb4f6fb9b76fa128e4afaa32a8f8a812c4c6eaed22d41cf660d9acaa0edd" Dec 13 03:28:54 crc kubenswrapper[5070]: I1213 03:28:54.179353 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-54d5-account-create-nh9pv" event={"ID":"cd03ee19-a6d8-4999-8b43-d31b4d159a8d","Type":"ContainerDied","Data":"e84e478f0f6f0984035b2c8d7dcd630f458cadf26f5d7a1d9eb1b9cff7d49f61"} Dec 13 03:28:54 crc kubenswrapper[5070]: I1213 03:28:54.179368 5070 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e84e478f0f6f0984035b2c8d7dcd630f458cadf26f5d7a1d9eb1b9cff7d49f61" Dec 13 03:28:59 crc kubenswrapper[5070]: I1213 03:28:59.288749 5070 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-x4xfl" podUID="fbcbdef3-6b6e-442e-9a5a-3bc14faf3be4" containerName="ovn-controller" probeResult="failure" output=< Dec 13 03:28:59 crc kubenswrapper[5070]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Dec 13 03:28:59 crc kubenswrapper[5070]: > Dec 13 03:28:59 crc kubenswrapper[5070]: I1213 03:28:59.317374 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-zmbl5" Dec 13 03:28:59 crc kubenswrapper[5070]: I1213 03:28:59.317540 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-zmbl5" Dec 13 03:28:59 crc kubenswrapper[5070]: I1213 03:28:59.554257 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-x4xfl-config-jg2sk"] Dec 13 03:28:59 crc kubenswrapper[5070]: E1213 03:28:59.563822 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cd03ee19-a6d8-4999-8b43-d31b4d159a8d" containerName="mariadb-account-create" Dec 13 03:28:59 crc kubenswrapper[5070]: I1213 03:28:59.564099 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="cd03ee19-a6d8-4999-8b43-d31b4d159a8d" containerName="mariadb-account-create" Dec 13 03:28:59 crc kubenswrapper[5070]: E1213 03:28:59.564220 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="578e1316-3a47-4516-89e8-59f936a22495" containerName="mariadb-account-create" Dec 13 03:28:59 crc kubenswrapper[5070]: I1213 03:28:59.564300 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="578e1316-3a47-4516-89e8-59f936a22495" containerName="mariadb-account-create" Dec 13 03:28:59 crc kubenswrapper[5070]: I1213 03:28:59.564615 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="578e1316-3a47-4516-89e8-59f936a22495" containerName="mariadb-account-create" Dec 13 03:28:59 crc kubenswrapper[5070]: I1213 03:28:59.564737 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="cd03ee19-a6d8-4999-8b43-d31b4d159a8d" containerName="mariadb-account-create" Dec 13 03:28:59 crc kubenswrapper[5070]: I1213 03:28:59.565425 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-x4xfl-config-jg2sk" Dec 13 03:28:59 crc kubenswrapper[5070]: I1213 03:28:59.569185 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-x4xfl-config-jg2sk"] Dec 13 03:28:59 crc kubenswrapper[5070]: I1213 03:28:59.569654 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Dec 13 03:28:59 crc kubenswrapper[5070]: I1213 03:28:59.603893 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/46fb8b43-4154-4f57-87cf-ad035cdb2408-var-log-ovn\") pod \"ovn-controller-x4xfl-config-jg2sk\" (UID: \"46fb8b43-4154-4f57-87cf-ad035cdb2408\") " pod="openstack/ovn-controller-x4xfl-config-jg2sk" Dec 13 03:28:59 crc kubenswrapper[5070]: I1213 03:28:59.604079 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/46fb8b43-4154-4f57-87cf-ad035cdb2408-additional-scripts\") pod \"ovn-controller-x4xfl-config-jg2sk\" (UID: \"46fb8b43-4154-4f57-87cf-ad035cdb2408\") " pod="openstack/ovn-controller-x4xfl-config-jg2sk" Dec 13 03:28:59 crc kubenswrapper[5070]: I1213 03:28:59.604116 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-znt2g\" (UniqueName: \"kubernetes.io/projected/46fb8b43-4154-4f57-87cf-ad035cdb2408-kube-api-access-znt2g\") pod \"ovn-controller-x4xfl-config-jg2sk\" (UID: \"46fb8b43-4154-4f57-87cf-ad035cdb2408\") " pod="openstack/ovn-controller-x4xfl-config-jg2sk" Dec 13 03:28:59 crc kubenswrapper[5070]: I1213 03:28:59.604202 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/46fb8b43-4154-4f57-87cf-ad035cdb2408-var-run\") pod \"ovn-controller-x4xfl-config-jg2sk\" (UID: \"46fb8b43-4154-4f57-87cf-ad035cdb2408\") " pod="openstack/ovn-controller-x4xfl-config-jg2sk" Dec 13 03:28:59 crc kubenswrapper[5070]: I1213 03:28:59.604238 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/46fb8b43-4154-4f57-87cf-ad035cdb2408-var-run-ovn\") pod \"ovn-controller-x4xfl-config-jg2sk\" (UID: \"46fb8b43-4154-4f57-87cf-ad035cdb2408\") " pod="openstack/ovn-controller-x4xfl-config-jg2sk" Dec 13 03:28:59 crc kubenswrapper[5070]: I1213 03:28:59.604432 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/46fb8b43-4154-4f57-87cf-ad035cdb2408-scripts\") pod \"ovn-controller-x4xfl-config-jg2sk\" (UID: \"46fb8b43-4154-4f57-87cf-ad035cdb2408\") " pod="openstack/ovn-controller-x4xfl-config-jg2sk" Dec 13 03:28:59 crc kubenswrapper[5070]: I1213 03:28:59.705511 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/46fb8b43-4154-4f57-87cf-ad035cdb2408-scripts\") pod \"ovn-controller-x4xfl-config-jg2sk\" (UID: \"46fb8b43-4154-4f57-87cf-ad035cdb2408\") " pod="openstack/ovn-controller-x4xfl-config-jg2sk" Dec 13 03:28:59 crc kubenswrapper[5070]: I1213 03:28:59.705566 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/46fb8b43-4154-4f57-87cf-ad035cdb2408-var-log-ovn\") pod 
\"ovn-controller-x4xfl-config-jg2sk\" (UID: \"46fb8b43-4154-4f57-87cf-ad035cdb2408\") " pod="openstack/ovn-controller-x4xfl-config-jg2sk" Dec 13 03:28:59 crc kubenswrapper[5070]: I1213 03:28:59.705629 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/46fb8b43-4154-4f57-87cf-ad035cdb2408-additional-scripts\") pod \"ovn-controller-x4xfl-config-jg2sk\" (UID: \"46fb8b43-4154-4f57-87cf-ad035cdb2408\") " pod="openstack/ovn-controller-x4xfl-config-jg2sk" Dec 13 03:28:59 crc kubenswrapper[5070]: I1213 03:28:59.705649 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-znt2g\" (UniqueName: \"kubernetes.io/projected/46fb8b43-4154-4f57-87cf-ad035cdb2408-kube-api-access-znt2g\") pod \"ovn-controller-x4xfl-config-jg2sk\" (UID: \"46fb8b43-4154-4f57-87cf-ad035cdb2408\") " pod="openstack/ovn-controller-x4xfl-config-jg2sk" Dec 13 03:28:59 crc kubenswrapper[5070]: I1213 03:28:59.705692 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/46fb8b43-4154-4f57-87cf-ad035cdb2408-var-run\") pod \"ovn-controller-x4xfl-config-jg2sk\" (UID: \"46fb8b43-4154-4f57-87cf-ad035cdb2408\") " pod="openstack/ovn-controller-x4xfl-config-jg2sk" Dec 13 03:28:59 crc kubenswrapper[5070]: I1213 03:28:59.705712 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/46fb8b43-4154-4f57-87cf-ad035cdb2408-var-run-ovn\") pod \"ovn-controller-x4xfl-config-jg2sk\" (UID: \"46fb8b43-4154-4f57-87cf-ad035cdb2408\") " pod="openstack/ovn-controller-x4xfl-config-jg2sk" Dec 13 03:28:59 crc kubenswrapper[5070]: I1213 03:28:59.706031 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/46fb8b43-4154-4f57-87cf-ad035cdb2408-var-run-ovn\") pod \"ovn-controller-x4xfl-config-jg2sk\" (UID: \"46fb8b43-4154-4f57-87cf-ad035cdb2408\") " pod="openstack/ovn-controller-x4xfl-config-jg2sk" Dec 13 03:28:59 crc kubenswrapper[5070]: I1213 03:28:59.706067 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/46fb8b43-4154-4f57-87cf-ad035cdb2408-var-run\") pod \"ovn-controller-x4xfl-config-jg2sk\" (UID: \"46fb8b43-4154-4f57-87cf-ad035cdb2408\") " pod="openstack/ovn-controller-x4xfl-config-jg2sk" Dec 13 03:28:59 crc kubenswrapper[5070]: I1213 03:28:59.706071 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/46fb8b43-4154-4f57-87cf-ad035cdb2408-var-log-ovn\") pod \"ovn-controller-x4xfl-config-jg2sk\" (UID: \"46fb8b43-4154-4f57-87cf-ad035cdb2408\") " pod="openstack/ovn-controller-x4xfl-config-jg2sk" Dec 13 03:28:59 crc kubenswrapper[5070]: I1213 03:28:59.707087 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/46fb8b43-4154-4f57-87cf-ad035cdb2408-additional-scripts\") pod \"ovn-controller-x4xfl-config-jg2sk\" (UID: \"46fb8b43-4154-4f57-87cf-ad035cdb2408\") " pod="openstack/ovn-controller-x4xfl-config-jg2sk" Dec 13 03:28:59 crc kubenswrapper[5070]: I1213 03:28:59.707618 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/46fb8b43-4154-4f57-87cf-ad035cdb2408-scripts\") pod 
\"ovn-controller-x4xfl-config-jg2sk\" (UID: \"46fb8b43-4154-4f57-87cf-ad035cdb2408\") " pod="openstack/ovn-controller-x4xfl-config-jg2sk" Dec 13 03:28:59 crc kubenswrapper[5070]: I1213 03:28:59.740814 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-znt2g\" (UniqueName: \"kubernetes.io/projected/46fb8b43-4154-4f57-87cf-ad035cdb2408-kube-api-access-znt2g\") pod \"ovn-controller-x4xfl-config-jg2sk\" (UID: \"46fb8b43-4154-4f57-87cf-ad035cdb2408\") " pod="openstack/ovn-controller-x4xfl-config-jg2sk" Dec 13 03:28:59 crc kubenswrapper[5070]: I1213 03:28:59.890043 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-x4xfl-config-jg2sk" Dec 13 03:29:04 crc kubenswrapper[5070]: I1213 03:29:04.290233 5070 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-x4xfl" podUID="fbcbdef3-6b6e-442e-9a5a-3bc14faf3be4" containerName="ovn-controller" probeResult="failure" output=< Dec 13 03:29:04 crc kubenswrapper[5070]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Dec 13 03:29:04 crc kubenswrapper[5070]: > Dec 13 03:29:06 crc kubenswrapper[5070]: E1213 03:29:06.089261 5070 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-glance-api:current-podified" Dec 13 03:29:06 crc kubenswrapper[5070]: E1213 03:29:06.089929 5070 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:glance-db-sync,Image:quay.io/podified-antelope-centos9/openstack-glance-api:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:true,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/glance/glance.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-djrdj,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42415,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42415,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPol
icy:nil,} start failed in pod glance-db-sync-vv4wt_openstack(682caa0d-3a80-41aa-a899-8613f8454481): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 13 03:29:06 crc kubenswrapper[5070]: E1213 03:29:06.091206 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"glance-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/glance-db-sync-vv4wt" podUID="682caa0d-3a80-41aa-a899-8613f8454481" Dec 13 03:29:06 crc kubenswrapper[5070]: I1213 03:29:06.292757 5070 generic.go:334] "Generic (PLEG): container finished" podID="9dd13bd9-bfbd-4f80-b334-d8b959a6187d" containerID="6c4ef47d6ecc0948e5e89e4b0a9e280fb1e18267e47c12f63307114b9cf1d371" exitCode=0 Dec 13 03:29:06 crc kubenswrapper[5070]: I1213 03:29:06.292863 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"9dd13bd9-bfbd-4f80-b334-d8b959a6187d","Type":"ContainerDied","Data":"6c4ef47d6ecc0948e5e89e4b0a9e280fb1e18267e47c12f63307114b9cf1d371"} Dec 13 03:29:06 crc kubenswrapper[5070]: I1213 03:29:06.295074 5070 generic.go:334] "Generic (PLEG): container finished" podID="d669856e-7406-451a-825e-9de1fc76f8b2" containerID="a06d4aee3203cbcbcb26562e632af6ca69d14932f595072336c8a998466e2782" exitCode=0 Dec 13 03:29:06 crc kubenswrapper[5070]: I1213 03:29:06.295158 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"d669856e-7406-451a-825e-9de1fc76f8b2","Type":"ContainerDied","Data":"a06d4aee3203cbcbcb26562e632af6ca69d14932f595072336c8a998466e2782"} Dec 13 03:29:06 crc kubenswrapper[5070]: E1213 03:29:06.298064 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"glance-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-glance-api:current-podified\\\"\"" pod="openstack/glance-db-sync-vv4wt" podUID="682caa0d-3a80-41aa-a899-8613f8454481" Dec 13 03:29:06 crc kubenswrapper[5070]: I1213 03:29:06.507685 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-x4xfl-config-jg2sk"] Dec 13 03:29:06 crc kubenswrapper[5070]: W1213 03:29:06.512098 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod46fb8b43_4154_4f57_87cf_ad035cdb2408.slice/crio-eb3403ed35041f7ecff1e63ede7ec84d234d0bf81778caa9f9e951e8a141e043 WatchSource:0}: Error finding container eb3403ed35041f7ecff1e63ede7ec84d234d0bf81778caa9f9e951e8a141e043: Status 404 returned error can't find the container with id eb3403ed35041f7ecff1e63ede7ec84d234d0bf81778caa9f9e951e8a141e043 Dec 13 03:29:07 crc kubenswrapper[5070]: I1213 03:29:07.304824 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"9dd13bd9-bfbd-4f80-b334-d8b959a6187d","Type":"ContainerStarted","Data":"33003ab2fb9eb934abf617d765789be5f82404c9f0f62b664f7771d0588edc3c"} Dec 13 03:29:07 crc kubenswrapper[5070]: I1213 03:29:07.305389 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Dec 13 03:29:07 crc kubenswrapper[5070]: I1213 03:29:07.306639 5070 generic.go:334] "Generic (PLEG): container finished" podID="46fb8b43-4154-4f57-87cf-ad035cdb2408" containerID="e6450eacfbe9fc1a61285d8cfc0d7bf6cf4651537676991d6c2c308e159a158a" exitCode=0 Dec 13 03:29:07 crc kubenswrapper[5070]: I1213 
03:29:07.306697 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-x4xfl-config-jg2sk" event={"ID":"46fb8b43-4154-4f57-87cf-ad035cdb2408","Type":"ContainerDied","Data":"e6450eacfbe9fc1a61285d8cfc0d7bf6cf4651537676991d6c2c308e159a158a"} Dec 13 03:29:07 crc kubenswrapper[5070]: I1213 03:29:07.306717 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-x4xfl-config-jg2sk" event={"ID":"46fb8b43-4154-4f57-87cf-ad035cdb2408","Type":"ContainerStarted","Data":"eb3403ed35041f7ecff1e63ede7ec84d234d0bf81778caa9f9e951e8a141e043"} Dec 13 03:29:07 crc kubenswrapper[5070]: I1213 03:29:07.309544 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"d669856e-7406-451a-825e-9de1fc76f8b2","Type":"ContainerStarted","Data":"ea3f2b7f506ec15d77b16759183c4887b690d74931302844a22ffc6494accc26"} Dec 13 03:29:07 crc kubenswrapper[5070]: I1213 03:29:07.309979 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Dec 13 03:29:07 crc kubenswrapper[5070]: I1213 03:29:07.330391 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=38.360801126 podStartE2EDuration="1m13.330367096s" podCreationTimestamp="2025-12-13 03:27:54 +0000 UTC" firstStartedPulling="2025-12-13 03:27:56.830347834 +0000 UTC m=+969.066191380" lastFinishedPulling="2025-12-13 03:28:31.799913804 +0000 UTC m=+1004.035757350" observedRunningTime="2025-12-13 03:29:07.325498044 +0000 UTC m=+1039.561341600" watchObservedRunningTime="2025-12-13 03:29:07.330367096 +0000 UTC m=+1039.566210642" Dec 13 03:29:08 crc kubenswrapper[5070]: I1213 03:29:08.668909 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-x4xfl-config-jg2sk" Dec 13 03:29:08 crc kubenswrapper[5070]: I1213 03:29:08.696157 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=-9223371963.158642 podStartE2EDuration="1m13.696134352s" podCreationTimestamp="2025-12-13 03:27:55 +0000 UTC" firstStartedPulling="2025-12-13 03:27:57.030570664 +0000 UTC m=+969.266414210" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 03:29:07.381098015 +0000 UTC m=+1039.616941561" watchObservedRunningTime="2025-12-13 03:29:08.696134352 +0000 UTC m=+1040.931977908" Dec 13 03:29:08 crc kubenswrapper[5070]: I1213 03:29:08.785740 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/46fb8b43-4154-4f57-87cf-ad035cdb2408-var-run-ovn\") pod \"46fb8b43-4154-4f57-87cf-ad035cdb2408\" (UID: \"46fb8b43-4154-4f57-87cf-ad035cdb2408\") " Dec 13 03:29:08 crc kubenswrapper[5070]: I1213 03:29:08.785868 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/46fb8b43-4154-4f57-87cf-ad035cdb2408-additional-scripts\") pod \"46fb8b43-4154-4f57-87cf-ad035cdb2408\" (UID: \"46fb8b43-4154-4f57-87cf-ad035cdb2408\") " Dec 13 03:29:08 crc kubenswrapper[5070]: I1213 03:29:08.785932 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/46fb8b43-4154-4f57-87cf-ad035cdb2408-var-log-ovn\") pod \"46fb8b43-4154-4f57-87cf-ad035cdb2408\" (UID: \"46fb8b43-4154-4f57-87cf-ad035cdb2408\") " Dec 13 03:29:08 crc kubenswrapper[5070]: I1213 03:29:08.785968 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/46fb8b43-4154-4f57-87cf-ad035cdb2408-scripts\") pod \"46fb8b43-4154-4f57-87cf-ad035cdb2408\" (UID: \"46fb8b43-4154-4f57-87cf-ad035cdb2408\") " Dec 13 03:29:08 crc kubenswrapper[5070]: I1213 03:29:08.785991 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/46fb8b43-4154-4f57-87cf-ad035cdb2408-var-run\") pod \"46fb8b43-4154-4f57-87cf-ad035cdb2408\" (UID: \"46fb8b43-4154-4f57-87cf-ad035cdb2408\") " Dec 13 03:29:08 crc kubenswrapper[5070]: I1213 03:29:08.786039 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-znt2g\" (UniqueName: \"kubernetes.io/projected/46fb8b43-4154-4f57-87cf-ad035cdb2408-kube-api-access-znt2g\") pod \"46fb8b43-4154-4f57-87cf-ad035cdb2408\" (UID: \"46fb8b43-4154-4f57-87cf-ad035cdb2408\") " Dec 13 03:29:08 crc kubenswrapper[5070]: I1213 03:29:08.786761 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/46fb8b43-4154-4f57-87cf-ad035cdb2408-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "46fb8b43-4154-4f57-87cf-ad035cdb2408" (UID: "46fb8b43-4154-4f57-87cf-ad035cdb2408"). InnerVolumeSpecName "var-run-ovn". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 13 03:29:08 crc kubenswrapper[5070]: I1213 03:29:08.787307 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/46fb8b43-4154-4f57-87cf-ad035cdb2408-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "46fb8b43-4154-4f57-87cf-ad035cdb2408" (UID: "46fb8b43-4154-4f57-87cf-ad035cdb2408"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:29:08 crc kubenswrapper[5070]: I1213 03:29:08.787341 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/46fb8b43-4154-4f57-87cf-ad035cdb2408-var-run" (OuterVolumeSpecName: "var-run") pod "46fb8b43-4154-4f57-87cf-ad035cdb2408" (UID: "46fb8b43-4154-4f57-87cf-ad035cdb2408"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 13 03:29:08 crc kubenswrapper[5070]: I1213 03:29:08.787363 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/46fb8b43-4154-4f57-87cf-ad035cdb2408-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "46fb8b43-4154-4f57-87cf-ad035cdb2408" (UID: "46fb8b43-4154-4f57-87cf-ad035cdb2408"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 13 03:29:08 crc kubenswrapper[5070]: I1213 03:29:08.788004 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/46fb8b43-4154-4f57-87cf-ad035cdb2408-scripts" (OuterVolumeSpecName: "scripts") pod "46fb8b43-4154-4f57-87cf-ad035cdb2408" (UID: "46fb8b43-4154-4f57-87cf-ad035cdb2408"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:29:08 crc kubenswrapper[5070]: I1213 03:29:08.791637 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/46fb8b43-4154-4f57-87cf-ad035cdb2408-kube-api-access-znt2g" (OuterVolumeSpecName: "kube-api-access-znt2g") pod "46fb8b43-4154-4f57-87cf-ad035cdb2408" (UID: "46fb8b43-4154-4f57-87cf-ad035cdb2408"). InnerVolumeSpecName "kube-api-access-znt2g". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:29:08 crc kubenswrapper[5070]: I1213 03:29:08.888217 5070 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/46fb8b43-4154-4f57-87cf-ad035cdb2408-var-run-ovn\") on node \"crc\" DevicePath \"\"" Dec 13 03:29:08 crc kubenswrapper[5070]: I1213 03:29:08.888255 5070 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/46fb8b43-4154-4f57-87cf-ad035cdb2408-additional-scripts\") on node \"crc\" DevicePath \"\"" Dec 13 03:29:08 crc kubenswrapper[5070]: I1213 03:29:08.888267 5070 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/46fb8b43-4154-4f57-87cf-ad035cdb2408-var-log-ovn\") on node \"crc\" DevicePath \"\"" Dec 13 03:29:08 crc kubenswrapper[5070]: I1213 03:29:08.888275 5070 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/46fb8b43-4154-4f57-87cf-ad035cdb2408-scripts\") on node \"crc\" DevicePath \"\"" Dec 13 03:29:08 crc kubenswrapper[5070]: I1213 03:29:08.888286 5070 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/46fb8b43-4154-4f57-87cf-ad035cdb2408-var-run\") on node \"crc\" DevicePath \"\"" Dec 13 03:29:08 crc kubenswrapper[5070]: I1213 03:29:08.888296 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-znt2g\" (UniqueName: \"kubernetes.io/projected/46fb8b43-4154-4f57-87cf-ad035cdb2408-kube-api-access-znt2g\") on node \"crc\" DevicePath \"\"" Dec 13 03:29:09 crc kubenswrapper[5070]: I1213 03:29:09.296591 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-x4xfl" Dec 13 03:29:09 crc kubenswrapper[5070]: I1213 03:29:09.326982 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-x4xfl-config-jg2sk" event={"ID":"46fb8b43-4154-4f57-87cf-ad035cdb2408","Type":"ContainerDied","Data":"eb3403ed35041f7ecff1e63ede7ec84d234d0bf81778caa9f9e951e8a141e043"} Dec 13 03:29:09 crc kubenswrapper[5070]: I1213 03:29:09.327022 5070 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="eb3403ed35041f7ecff1e63ede7ec84d234d0bf81778caa9f9e951e8a141e043" Dec 13 03:29:09 crc kubenswrapper[5070]: I1213 03:29:09.327045 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-x4xfl-config-jg2sk" Dec 13 03:29:09 crc kubenswrapper[5070]: I1213 03:29:09.771985 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-x4xfl-config-jg2sk"] Dec 13 03:29:09 crc kubenswrapper[5070]: I1213 03:29:09.778431 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-x4xfl-config-jg2sk"] Dec 13 03:29:10 crc kubenswrapper[5070]: I1213 03:29:10.176535 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="46fb8b43-4154-4f57-87cf-ad035cdb2408" path="/var/lib/kubelet/pods/46fb8b43-4154-4f57-87cf-ad035cdb2408/volumes" Dec 13 03:29:16 crc kubenswrapper[5070]: I1213 03:29:16.189688 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Dec 13 03:29:16 crc kubenswrapper[5070]: I1213 03:29:16.463747 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Dec 13 03:29:16 crc kubenswrapper[5070]: I1213 03:29:16.690155 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-create-zl6vp"] Dec 13 03:29:16 crc kubenswrapper[5070]: E1213 03:29:16.690642 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="46fb8b43-4154-4f57-87cf-ad035cdb2408" containerName="ovn-config" Dec 13 03:29:16 crc kubenswrapper[5070]: I1213 03:29:16.690677 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="46fb8b43-4154-4f57-87cf-ad035cdb2408" containerName="ovn-config" Dec 13 03:29:16 crc kubenswrapper[5070]: I1213 03:29:16.690834 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="46fb8b43-4154-4f57-87cf-ad035cdb2408" containerName="ovn-config" Dec 13 03:29:16 crc kubenswrapper[5070]: I1213 03:29:16.691362 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-zl6vp" Dec 13 03:29:16 crc kubenswrapper[5070]: I1213 03:29:16.699055 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-zl6vp"] Dec 13 03:29:16 crc kubenswrapper[5070]: I1213 03:29:16.795849 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-create-6pt28"] Dec 13 03:29:16 crc kubenswrapper[5070]: I1213 03:29:16.798368 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-6pt28" Dec 13 03:29:16 crc kubenswrapper[5070]: I1213 03:29:16.805798 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-6pt28"] Dec 13 03:29:16 crc kubenswrapper[5070]: I1213 03:29:16.823091 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c4kkg\" (UniqueName: \"kubernetes.io/projected/9484e5db-7a78-4875-adfc-274639497868-kube-api-access-c4kkg\") pod \"cinder-db-create-zl6vp\" (UID: \"9484e5db-7a78-4875-adfc-274639497868\") " pod="openstack/cinder-db-create-zl6vp" Dec 13 03:29:16 crc kubenswrapper[5070]: I1213 03:29:16.924263 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fmgpz\" (UniqueName: \"kubernetes.io/projected/cd5e6065-7d41-43e2-974f-820e716a8f73-kube-api-access-fmgpz\") pod \"barbican-db-create-6pt28\" (UID: \"cd5e6065-7d41-43e2-974f-820e716a8f73\") " pod="openstack/barbican-db-create-6pt28" Dec 13 03:29:16 crc kubenswrapper[5070]: I1213 03:29:16.924793 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c4kkg\" (UniqueName: \"kubernetes.io/projected/9484e5db-7a78-4875-adfc-274639497868-kube-api-access-c4kkg\") pod \"cinder-db-create-zl6vp\" (UID: \"9484e5db-7a78-4875-adfc-274639497868\") " pod="openstack/cinder-db-create-zl6vp" Dec 13 03:29:16 crc kubenswrapper[5070]: I1213 03:29:16.958360 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-sync-gxzdc"] Dec 13 03:29:16 crc kubenswrapper[5070]: I1213 03:29:16.959435 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-gxzdc" Dec 13 03:29:16 crc kubenswrapper[5070]: I1213 03:29:16.960852 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c4kkg\" (UniqueName: \"kubernetes.io/projected/9484e5db-7a78-4875-adfc-274639497868-kube-api-access-c4kkg\") pod \"cinder-db-create-zl6vp\" (UID: \"9484e5db-7a78-4875-adfc-274639497868\") " pod="openstack/cinder-db-create-zl6vp" Dec 13 03:29:16 crc kubenswrapper[5070]: I1213 03:29:16.964282 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 13 03:29:16 crc kubenswrapper[5070]: I1213 03:29:16.965066 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 13 03:29:16 crc kubenswrapper[5070]: I1213 03:29:16.965308 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 13 03:29:16 crc kubenswrapper[5070]: I1213 03:29:16.968485 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-g4vc4" Dec 13 03:29:16 crc kubenswrapper[5070]: I1213 03:29:16.975057 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-gxzdc"] Dec 13 03:29:17 crc kubenswrapper[5070]: I1213 03:29:17.012230 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-create-w4rjq"] Dec 13 03:29:17 crc kubenswrapper[5070]: I1213 03:29:17.013337 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-w4rjq" Dec 13 03:29:17 crc kubenswrapper[5070]: I1213 03:29:17.020077 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-w4rjq"] Dec 13 03:29:17 crc kubenswrapper[5070]: I1213 03:29:17.026602 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fmgpz\" (UniqueName: \"kubernetes.io/projected/cd5e6065-7d41-43e2-974f-820e716a8f73-kube-api-access-fmgpz\") pod \"barbican-db-create-6pt28\" (UID: \"cd5e6065-7d41-43e2-974f-820e716a8f73\") " pod="openstack/barbican-db-create-6pt28" Dec 13 03:29:17 crc kubenswrapper[5070]: I1213 03:29:17.026670 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a821721-8311-49af-872d-7ea63f878c61-combined-ca-bundle\") pod \"keystone-db-sync-gxzdc\" (UID: \"5a821721-8311-49af-872d-7ea63f878c61\") " pod="openstack/keystone-db-sync-gxzdc" Dec 13 03:29:17 crc kubenswrapper[5070]: I1213 03:29:17.026754 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5a821721-8311-49af-872d-7ea63f878c61-config-data\") pod \"keystone-db-sync-gxzdc\" (UID: \"5a821721-8311-49af-872d-7ea63f878c61\") " pod="openstack/keystone-db-sync-gxzdc" Dec 13 03:29:17 crc kubenswrapper[5070]: I1213 03:29:17.026798 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cpdz8\" (UniqueName: \"kubernetes.io/projected/5a821721-8311-49af-872d-7ea63f878c61-kube-api-access-cpdz8\") pod \"keystone-db-sync-gxzdc\" (UID: \"5a821721-8311-49af-872d-7ea63f878c61\") " pod="openstack/keystone-db-sync-gxzdc" Dec 13 03:29:17 crc kubenswrapper[5070]: I1213 03:29:17.047142 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fmgpz\" (UniqueName: \"kubernetes.io/projected/cd5e6065-7d41-43e2-974f-820e716a8f73-kube-api-access-fmgpz\") pod \"barbican-db-create-6pt28\" (UID: \"cd5e6065-7d41-43e2-974f-820e716a8f73\") " pod="openstack/barbican-db-create-6pt28" Dec 13 03:29:17 crc kubenswrapper[5070]: I1213 03:29:17.066808 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-zl6vp" Dec 13 03:29:17 crc kubenswrapper[5070]: I1213 03:29:17.118884 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-6pt28" Dec 13 03:29:17 crc kubenswrapper[5070]: I1213 03:29:17.128242 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5a821721-8311-49af-872d-7ea63f878c61-config-data\") pod \"keystone-db-sync-gxzdc\" (UID: \"5a821721-8311-49af-872d-7ea63f878c61\") " pod="openstack/keystone-db-sync-gxzdc" Dec 13 03:29:17 crc kubenswrapper[5070]: I1213 03:29:17.128321 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cpdz8\" (UniqueName: \"kubernetes.io/projected/5a821721-8311-49af-872d-7ea63f878c61-kube-api-access-cpdz8\") pod \"keystone-db-sync-gxzdc\" (UID: \"5a821721-8311-49af-872d-7ea63f878c61\") " pod="openstack/keystone-db-sync-gxzdc" Dec 13 03:29:17 crc kubenswrapper[5070]: I1213 03:29:17.128381 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9wh2n\" (UniqueName: \"kubernetes.io/projected/b7cf4d27-18ae-4936-b68c-0c0bc779e5ba-kube-api-access-9wh2n\") pod \"neutron-db-create-w4rjq\" (UID: \"b7cf4d27-18ae-4936-b68c-0c0bc779e5ba\") " pod="openstack/neutron-db-create-w4rjq" Dec 13 03:29:17 crc kubenswrapper[5070]: I1213 03:29:17.128465 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a821721-8311-49af-872d-7ea63f878c61-combined-ca-bundle\") pod \"keystone-db-sync-gxzdc\" (UID: \"5a821721-8311-49af-872d-7ea63f878c61\") " pod="openstack/keystone-db-sync-gxzdc" Dec 13 03:29:17 crc kubenswrapper[5070]: I1213 03:29:17.135971 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a821721-8311-49af-872d-7ea63f878c61-combined-ca-bundle\") pod \"keystone-db-sync-gxzdc\" (UID: \"5a821721-8311-49af-872d-7ea63f878c61\") " pod="openstack/keystone-db-sync-gxzdc" Dec 13 03:29:17 crc kubenswrapper[5070]: I1213 03:29:17.144136 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5a821721-8311-49af-872d-7ea63f878c61-config-data\") pod \"keystone-db-sync-gxzdc\" (UID: \"5a821721-8311-49af-872d-7ea63f878c61\") " pod="openstack/keystone-db-sync-gxzdc" Dec 13 03:29:17 crc kubenswrapper[5070]: I1213 03:29:17.147248 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cpdz8\" (UniqueName: \"kubernetes.io/projected/5a821721-8311-49af-872d-7ea63f878c61-kube-api-access-cpdz8\") pod \"keystone-db-sync-gxzdc\" (UID: \"5a821721-8311-49af-872d-7ea63f878c61\") " pod="openstack/keystone-db-sync-gxzdc" Dec 13 03:29:17 crc kubenswrapper[5070]: I1213 03:29:17.316508 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9wh2n\" (UniqueName: \"kubernetes.io/projected/b7cf4d27-18ae-4936-b68c-0c0bc779e5ba-kube-api-access-9wh2n\") pod \"neutron-db-create-w4rjq\" (UID: \"b7cf4d27-18ae-4936-b68c-0c0bc779e5ba\") " pod="openstack/neutron-db-create-w4rjq" Dec 13 03:29:17 crc kubenswrapper[5070]: I1213 03:29:17.324557 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-gxzdc" Dec 13 03:29:17 crc kubenswrapper[5070]: I1213 03:29:17.356262 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9wh2n\" (UniqueName: \"kubernetes.io/projected/b7cf4d27-18ae-4936-b68c-0c0bc779e5ba-kube-api-access-9wh2n\") pod \"neutron-db-create-w4rjq\" (UID: \"b7cf4d27-18ae-4936-b68c-0c0bc779e5ba\") " pod="openstack/neutron-db-create-w4rjq" Dec 13 03:29:17 crc kubenswrapper[5070]: I1213 03:29:17.640594 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-w4rjq" Dec 13 03:29:17 crc kubenswrapper[5070]: I1213 03:29:17.808001 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-gxzdc"] Dec 13 03:29:17 crc kubenswrapper[5070]: W1213 03:29:17.816744 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5a821721_8311_49af_872d_7ea63f878c61.slice/crio-ed7b08e5dd169b8861934a852b060d4307372683ef99e4103d53c9470768793d WatchSource:0}: Error finding container ed7b08e5dd169b8861934a852b060d4307372683ef99e4103d53c9470768793d: Status 404 returned error can't find the container with id ed7b08e5dd169b8861934a852b060d4307372683ef99e4103d53c9470768793d Dec 13 03:29:17 crc kubenswrapper[5070]: I1213 03:29:17.879680 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-zl6vp"] Dec 13 03:29:17 crc kubenswrapper[5070]: I1213 03:29:17.932995 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-6pt28"] Dec 13 03:29:17 crc kubenswrapper[5070]: W1213 03:29:17.937955 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcd5e6065_7d41_43e2_974f_820e716a8f73.slice/crio-beee9a681bcd5806e62c96f56b8cf1b5ec971cd4d7796ba00b2eab1ddb3cb245 WatchSource:0}: Error finding container beee9a681bcd5806e62c96f56b8cf1b5ec971cd4d7796ba00b2eab1ddb3cb245: Status 404 returned error can't find the container with id beee9a681bcd5806e62c96f56b8cf1b5ec971cd4d7796ba00b2eab1ddb3cb245 Dec 13 03:29:18 crc kubenswrapper[5070]: I1213 03:29:18.134672 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-w4rjq"] Dec 13 03:29:18 crc kubenswrapper[5070]: I1213 03:29:18.454241 5070 generic.go:334] "Generic (PLEG): container finished" podID="9484e5db-7a78-4875-adfc-274639497868" containerID="9b3398d72e77a925b9d06709f760dad67e60bf84e008f913e08077f320815422" exitCode=0 Dec 13 03:29:18 crc kubenswrapper[5070]: I1213 03:29:18.454336 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-zl6vp" event={"ID":"9484e5db-7a78-4875-adfc-274639497868","Type":"ContainerDied","Data":"9b3398d72e77a925b9d06709f760dad67e60bf84e008f913e08077f320815422"} Dec 13 03:29:18 crc kubenswrapper[5070]: I1213 03:29:18.454629 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-zl6vp" event={"ID":"9484e5db-7a78-4875-adfc-274639497868","Type":"ContainerStarted","Data":"26e8e0585f1e207d4e4e2b897e1be0d633f5c7240a78bfb1f9a5da4084af8527"} Dec 13 03:29:18 crc kubenswrapper[5070]: I1213 03:29:18.455718 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-gxzdc" event={"ID":"5a821721-8311-49af-872d-7ea63f878c61","Type":"ContainerStarted","Data":"ed7b08e5dd169b8861934a852b060d4307372683ef99e4103d53c9470768793d"} Dec 13 03:29:18 crc 
kubenswrapper[5070]: I1213 03:29:18.457665 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-w4rjq" event={"ID":"b7cf4d27-18ae-4936-b68c-0c0bc779e5ba","Type":"ContainerStarted","Data":"659801e4f290adc5edaa99e0e84ea4264d3e59db2955b93e6cf5f26c7fadaecf"} Dec 13 03:29:18 crc kubenswrapper[5070]: I1213 03:29:18.457692 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-w4rjq" event={"ID":"b7cf4d27-18ae-4936-b68c-0c0bc779e5ba","Type":"ContainerStarted","Data":"efe370e450ede8212a185ed5a3586c522ef466e53cbdd51a10abc994b82f83a8"} Dec 13 03:29:18 crc kubenswrapper[5070]: I1213 03:29:18.459497 5070 generic.go:334] "Generic (PLEG): container finished" podID="cd5e6065-7d41-43e2-974f-820e716a8f73" containerID="d56059de180814feaec0d4a697e973a90e612d92e0cda462e3c56d3ab55aa883" exitCode=0 Dec 13 03:29:18 crc kubenswrapper[5070]: I1213 03:29:18.459542 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-6pt28" event={"ID":"cd5e6065-7d41-43e2-974f-820e716a8f73","Type":"ContainerDied","Data":"d56059de180814feaec0d4a697e973a90e612d92e0cda462e3c56d3ab55aa883"} Dec 13 03:29:18 crc kubenswrapper[5070]: I1213 03:29:18.459567 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-6pt28" event={"ID":"cd5e6065-7d41-43e2-974f-820e716a8f73","Type":"ContainerStarted","Data":"beee9a681bcd5806e62c96f56b8cf1b5ec971cd4d7796ba00b2eab1ddb3cb245"} Dec 13 03:29:18 crc kubenswrapper[5070]: I1213 03:29:18.504390 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-create-w4rjq" podStartSLOduration=2.50436726 podStartE2EDuration="2.50436726s" podCreationTimestamp="2025-12-13 03:29:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 03:29:18.49775803 +0000 UTC m=+1050.733601576" watchObservedRunningTime="2025-12-13 03:29:18.50436726 +0000 UTC m=+1050.740210816" Dec 13 03:29:19 crc kubenswrapper[5070]: I1213 03:29:19.467879 5070 generic.go:334] "Generic (PLEG): container finished" podID="b7cf4d27-18ae-4936-b68c-0c0bc779e5ba" containerID="659801e4f290adc5edaa99e0e84ea4264d3e59db2955b93e6cf5f26c7fadaecf" exitCode=0 Dec 13 03:29:19 crc kubenswrapper[5070]: I1213 03:29:19.468327 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-w4rjq" event={"ID":"b7cf4d27-18ae-4936-b68c-0c0bc779e5ba","Type":"ContainerDied","Data":"659801e4f290adc5edaa99e0e84ea4264d3e59db2955b93e6cf5f26c7fadaecf"} Dec 13 03:29:21 crc kubenswrapper[5070]: I1213 03:29:21.943360 5070 patch_prober.go:28] interesting pod/machine-config-daemon-9l4rb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 13 03:29:21 crc kubenswrapper[5070]: I1213 03:29:21.943737 5070 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 13 03:29:22 crc kubenswrapper[5070]: I1213 03:29:22.500168 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-zl6vp" 
event={"ID":"9484e5db-7a78-4875-adfc-274639497868","Type":"ContainerDied","Data":"26e8e0585f1e207d4e4e2b897e1be0d633f5c7240a78bfb1f9a5da4084af8527"} Dec 13 03:29:22 crc kubenswrapper[5070]: I1213 03:29:22.500483 5070 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="26e8e0585f1e207d4e4e2b897e1be0d633f5c7240a78bfb1f9a5da4084af8527" Dec 13 03:29:22 crc kubenswrapper[5070]: I1213 03:29:22.501698 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-w4rjq" event={"ID":"b7cf4d27-18ae-4936-b68c-0c0bc779e5ba","Type":"ContainerDied","Data":"efe370e450ede8212a185ed5a3586c522ef466e53cbdd51a10abc994b82f83a8"} Dec 13 03:29:22 crc kubenswrapper[5070]: I1213 03:29:22.501727 5070 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="efe370e450ede8212a185ed5a3586c522ef466e53cbdd51a10abc994b82f83a8" Dec 13 03:29:22 crc kubenswrapper[5070]: I1213 03:29:22.502884 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-6pt28" event={"ID":"cd5e6065-7d41-43e2-974f-820e716a8f73","Type":"ContainerDied","Data":"beee9a681bcd5806e62c96f56b8cf1b5ec971cd4d7796ba00b2eab1ddb3cb245"} Dec 13 03:29:22 crc kubenswrapper[5070]: I1213 03:29:22.502918 5070 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="beee9a681bcd5806e62c96f56b8cf1b5ec971cd4d7796ba00b2eab1ddb3cb245" Dec 13 03:29:22 crc kubenswrapper[5070]: I1213 03:29:22.580880 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-6pt28" Dec 13 03:29:22 crc kubenswrapper[5070]: I1213 03:29:22.603160 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-zl6vp" Dec 13 03:29:22 crc kubenswrapper[5070]: I1213 03:29:22.622009 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-w4rjq" Dec 13 03:29:22 crc kubenswrapper[5070]: I1213 03:29:22.626174 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c4kkg\" (UniqueName: \"kubernetes.io/projected/9484e5db-7a78-4875-adfc-274639497868-kube-api-access-c4kkg\") pod \"9484e5db-7a78-4875-adfc-274639497868\" (UID: \"9484e5db-7a78-4875-adfc-274639497868\") " Dec 13 03:29:22 crc kubenswrapper[5070]: I1213 03:29:22.626302 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fmgpz\" (UniqueName: \"kubernetes.io/projected/cd5e6065-7d41-43e2-974f-820e716a8f73-kube-api-access-fmgpz\") pod \"cd5e6065-7d41-43e2-974f-820e716a8f73\" (UID: \"cd5e6065-7d41-43e2-974f-820e716a8f73\") " Dec 13 03:29:22 crc kubenswrapper[5070]: I1213 03:29:22.630774 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd5e6065-7d41-43e2-974f-820e716a8f73-kube-api-access-fmgpz" (OuterVolumeSpecName: "kube-api-access-fmgpz") pod "cd5e6065-7d41-43e2-974f-820e716a8f73" (UID: "cd5e6065-7d41-43e2-974f-820e716a8f73"). InnerVolumeSpecName "kube-api-access-fmgpz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:29:22 crc kubenswrapper[5070]: I1213 03:29:22.637279 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9484e5db-7a78-4875-adfc-274639497868-kube-api-access-c4kkg" (OuterVolumeSpecName: "kube-api-access-c4kkg") pod "9484e5db-7a78-4875-adfc-274639497868" (UID: "9484e5db-7a78-4875-adfc-274639497868"). 
InnerVolumeSpecName "kube-api-access-c4kkg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:29:22 crc kubenswrapper[5070]: I1213 03:29:22.728478 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9wh2n\" (UniqueName: \"kubernetes.io/projected/b7cf4d27-18ae-4936-b68c-0c0bc779e5ba-kube-api-access-9wh2n\") pod \"b7cf4d27-18ae-4936-b68c-0c0bc779e5ba\" (UID: \"b7cf4d27-18ae-4936-b68c-0c0bc779e5ba\") " Dec 13 03:29:22 crc kubenswrapper[5070]: I1213 03:29:22.729053 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c4kkg\" (UniqueName: \"kubernetes.io/projected/9484e5db-7a78-4875-adfc-274639497868-kube-api-access-c4kkg\") on node \"crc\" DevicePath \"\"" Dec 13 03:29:22 crc kubenswrapper[5070]: I1213 03:29:22.729074 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fmgpz\" (UniqueName: \"kubernetes.io/projected/cd5e6065-7d41-43e2-974f-820e716a8f73-kube-api-access-fmgpz\") on node \"crc\" DevicePath \"\"" Dec 13 03:29:22 crc kubenswrapper[5070]: I1213 03:29:22.731480 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b7cf4d27-18ae-4936-b68c-0c0bc779e5ba-kube-api-access-9wh2n" (OuterVolumeSpecName: "kube-api-access-9wh2n") pod "b7cf4d27-18ae-4936-b68c-0c0bc779e5ba" (UID: "b7cf4d27-18ae-4936-b68c-0c0bc779e5ba"). InnerVolumeSpecName "kube-api-access-9wh2n". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:29:22 crc kubenswrapper[5070]: I1213 03:29:22.830092 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9wh2n\" (UniqueName: \"kubernetes.io/projected/b7cf4d27-18ae-4936-b68c-0c0bc779e5ba-kube-api-access-9wh2n\") on node \"crc\" DevicePath \"\"" Dec 13 03:29:23 crc kubenswrapper[5070]: I1213 03:29:23.513489 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-vv4wt" event={"ID":"682caa0d-3a80-41aa-a899-8613f8454481","Type":"ContainerStarted","Data":"6c655b199cd92d3e80f20e037439480eca6807c7d128932bc536906a328be95e"} Dec 13 03:29:23 crc kubenswrapper[5070]: I1213 03:29:23.517260 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-gxzdc" event={"ID":"5a821721-8311-49af-872d-7ea63f878c61","Type":"ContainerStarted","Data":"e3ad13f3619e15f567bee08b1d191b5222f8a301d2ba0d9b45caacdfa78a7e65"} Dec 13 03:29:23 crc kubenswrapper[5070]: I1213 03:29:23.517303 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-6pt28" Dec 13 03:29:23 crc kubenswrapper[5070]: I1213 03:29:23.517281 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-zl6vp" Dec 13 03:29:23 crc kubenswrapper[5070]: I1213 03:29:23.522523 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-w4rjq" Dec 13 03:29:23 crc kubenswrapper[5070]: I1213 03:29:23.544870 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-sync-vv4wt" podStartSLOduration=2.001441275 podStartE2EDuration="33.54484843s" podCreationTimestamp="2025-12-13 03:28:50 +0000 UTC" firstStartedPulling="2025-12-13 03:28:50.907139966 +0000 UTC m=+1023.142983512" lastFinishedPulling="2025-12-13 03:29:22.450547121 +0000 UTC m=+1054.686390667" observedRunningTime="2025-12-13 03:29:23.539728681 +0000 UTC m=+1055.775572267" watchObservedRunningTime="2025-12-13 03:29:23.54484843 +0000 UTC m=+1055.780691976" Dec 13 03:29:23 crc kubenswrapper[5070]: I1213 03:29:23.567517 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-sync-gxzdc" podStartSLOduration=2.928328342 podStartE2EDuration="7.567497056s" podCreationTimestamp="2025-12-13 03:29:16 +0000 UTC" firstStartedPulling="2025-12-13 03:29:17.819154779 +0000 UTC m=+1050.054998325" lastFinishedPulling="2025-12-13 03:29:22.458323493 +0000 UTC m=+1054.694167039" observedRunningTime="2025-12-13 03:29:23.560886545 +0000 UTC m=+1055.796730111" watchObservedRunningTime="2025-12-13 03:29:23.567497056 +0000 UTC m=+1055.803340602" Dec 13 03:29:26 crc kubenswrapper[5070]: I1213 03:29:26.546724 5070 generic.go:334] "Generic (PLEG): container finished" podID="5a821721-8311-49af-872d-7ea63f878c61" containerID="e3ad13f3619e15f567bee08b1d191b5222f8a301d2ba0d9b45caacdfa78a7e65" exitCode=0 Dec 13 03:29:26 crc kubenswrapper[5070]: I1213 03:29:26.546833 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-gxzdc" event={"ID":"5a821721-8311-49af-872d-7ea63f878c61","Type":"ContainerDied","Data":"e3ad13f3619e15f567bee08b1d191b5222f8a301d2ba0d9b45caacdfa78a7e65"} Dec 13 03:29:27 crc kubenswrapper[5070]: I1213 03:29:27.841265 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-gxzdc" Dec 13 03:29:27 crc kubenswrapper[5070]: I1213 03:29:27.954510 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cpdz8\" (UniqueName: \"kubernetes.io/projected/5a821721-8311-49af-872d-7ea63f878c61-kube-api-access-cpdz8\") pod \"5a821721-8311-49af-872d-7ea63f878c61\" (UID: \"5a821721-8311-49af-872d-7ea63f878c61\") " Dec 13 03:29:27 crc kubenswrapper[5070]: I1213 03:29:27.954720 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a821721-8311-49af-872d-7ea63f878c61-combined-ca-bundle\") pod \"5a821721-8311-49af-872d-7ea63f878c61\" (UID: \"5a821721-8311-49af-872d-7ea63f878c61\") " Dec 13 03:29:27 crc kubenswrapper[5070]: I1213 03:29:27.954808 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5a821721-8311-49af-872d-7ea63f878c61-config-data\") pod \"5a821721-8311-49af-872d-7ea63f878c61\" (UID: \"5a821721-8311-49af-872d-7ea63f878c61\") " Dec 13 03:29:27 crc kubenswrapper[5070]: I1213 03:29:27.962764 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5a821721-8311-49af-872d-7ea63f878c61-kube-api-access-cpdz8" (OuterVolumeSpecName: "kube-api-access-cpdz8") pod "5a821721-8311-49af-872d-7ea63f878c61" (UID: "5a821721-8311-49af-872d-7ea63f878c61"). InnerVolumeSpecName "kube-api-access-cpdz8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:29:28 crc kubenswrapper[5070]: I1213 03:29:28.043665 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5a821721-8311-49af-872d-7ea63f878c61-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5a821721-8311-49af-872d-7ea63f878c61" (UID: "5a821721-8311-49af-872d-7ea63f878c61"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:29:28 crc kubenswrapper[5070]: I1213 03:29:28.058493 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cpdz8\" (UniqueName: \"kubernetes.io/projected/5a821721-8311-49af-872d-7ea63f878c61-kube-api-access-cpdz8\") on node \"crc\" DevicePath \"\"" Dec 13 03:29:28 crc kubenswrapper[5070]: I1213 03:29:28.058531 5070 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a821721-8311-49af-872d-7ea63f878c61-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 03:29:28 crc kubenswrapper[5070]: I1213 03:29:28.078605 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5a821721-8311-49af-872d-7ea63f878c61-config-data" (OuterVolumeSpecName: "config-data") pod "5a821721-8311-49af-872d-7ea63f878c61" (UID: "5a821721-8311-49af-872d-7ea63f878c61"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:29:28 crc kubenswrapper[5070]: I1213 03:29:28.159792 5070 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5a821721-8311-49af-872d-7ea63f878c61-config-data\") on node \"crc\" DevicePath \"\"" Dec 13 03:29:28 crc kubenswrapper[5070]: I1213 03:29:28.562658 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-gxzdc" event={"ID":"5a821721-8311-49af-872d-7ea63f878c61","Type":"ContainerDied","Data":"ed7b08e5dd169b8861934a852b060d4307372683ef99e4103d53c9470768793d"} Dec 13 03:29:28 crc kubenswrapper[5070]: I1213 03:29:28.562697 5070 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ed7b08e5dd169b8861934a852b060d4307372683ef99e4103d53c9470768793d" Dec 13 03:29:28 crc kubenswrapper[5070]: I1213 03:29:28.562710 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-gxzdc" Dec 13 03:29:28 crc kubenswrapper[5070]: I1213 03:29:28.834259 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-66fbd85b65-nz6s7"] Dec 13 03:29:28 crc kubenswrapper[5070]: E1213 03:29:28.834921 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cd5e6065-7d41-43e2-974f-820e716a8f73" containerName="mariadb-database-create" Dec 13 03:29:28 crc kubenswrapper[5070]: I1213 03:29:28.834942 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="cd5e6065-7d41-43e2-974f-820e716a8f73" containerName="mariadb-database-create" Dec 13 03:29:28 crc kubenswrapper[5070]: E1213 03:29:28.834976 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5a821721-8311-49af-872d-7ea63f878c61" containerName="keystone-db-sync" Dec 13 03:29:28 crc kubenswrapper[5070]: I1213 03:29:28.834985 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="5a821721-8311-49af-872d-7ea63f878c61" containerName="keystone-db-sync" Dec 13 03:29:28 crc kubenswrapper[5070]: E1213 03:29:28.834997 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9484e5db-7a78-4875-adfc-274639497868" containerName="mariadb-database-create" Dec 13 03:29:28 crc kubenswrapper[5070]: I1213 03:29:28.835005 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="9484e5db-7a78-4875-adfc-274639497868" containerName="mariadb-database-create" Dec 13 03:29:28 crc kubenswrapper[5070]: E1213 03:29:28.835018 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b7cf4d27-18ae-4936-b68c-0c0bc779e5ba" containerName="mariadb-database-create" Dec 13 03:29:28 crc kubenswrapper[5070]: I1213 03:29:28.835025 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="b7cf4d27-18ae-4936-b68c-0c0bc779e5ba" containerName="mariadb-database-create" Dec 13 03:29:28 crc kubenswrapper[5070]: I1213 03:29:28.835199 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="cd5e6065-7d41-43e2-974f-820e716a8f73" containerName="mariadb-database-create" Dec 13 03:29:28 crc kubenswrapper[5070]: I1213 03:29:28.835217 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="9484e5db-7a78-4875-adfc-274639497868" containerName="mariadb-database-create" Dec 13 03:29:28 crc kubenswrapper[5070]: I1213 03:29:28.835228 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="b7cf4d27-18ae-4936-b68c-0c0bc779e5ba" containerName="mariadb-database-create" Dec 13 03:29:28 crc kubenswrapper[5070]: I1213 03:29:28.835243 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="5a821721-8311-49af-872d-7ea63f878c61" containerName="keystone-db-sync" Dec 13 03:29:28 crc kubenswrapper[5070]: I1213 03:29:28.836192 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-66fbd85b65-nz6s7" Dec 13 03:29:28 crc kubenswrapper[5070]: I1213 03:29:28.841332 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-66fbd85b65-nz6s7"] Dec 13 03:29:28 crc kubenswrapper[5070]: I1213 03:29:28.873184 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c966758b-d039-45a2-a244-1be0ce984b8f-dns-svc\") pod \"dnsmasq-dns-66fbd85b65-nz6s7\" (UID: \"c966758b-d039-45a2-a244-1be0ce984b8f\") " pod="openstack/dnsmasq-dns-66fbd85b65-nz6s7" Dec 13 03:29:28 crc kubenswrapper[5070]: I1213 03:29:28.873250 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c966758b-d039-45a2-a244-1be0ce984b8f-ovsdbserver-sb\") pod \"dnsmasq-dns-66fbd85b65-nz6s7\" (UID: \"c966758b-d039-45a2-a244-1be0ce984b8f\") " pod="openstack/dnsmasq-dns-66fbd85b65-nz6s7" Dec 13 03:29:28 crc kubenswrapper[5070]: I1213 03:29:28.873317 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mdhcm\" (UniqueName: \"kubernetes.io/projected/c966758b-d039-45a2-a244-1be0ce984b8f-kube-api-access-mdhcm\") pod \"dnsmasq-dns-66fbd85b65-nz6s7\" (UID: \"c966758b-d039-45a2-a244-1be0ce984b8f\") " pod="openstack/dnsmasq-dns-66fbd85b65-nz6s7" Dec 13 03:29:28 crc kubenswrapper[5070]: I1213 03:29:28.873386 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c966758b-d039-45a2-a244-1be0ce984b8f-config\") pod \"dnsmasq-dns-66fbd85b65-nz6s7\" (UID: \"c966758b-d039-45a2-a244-1be0ce984b8f\") " pod="openstack/dnsmasq-dns-66fbd85b65-nz6s7" Dec 13 03:29:28 crc kubenswrapper[5070]: I1213 03:29:28.873424 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c966758b-d039-45a2-a244-1be0ce984b8f-ovsdbserver-nb\") pod \"dnsmasq-dns-66fbd85b65-nz6s7\" (UID: \"c966758b-d039-45a2-a244-1be0ce984b8f\") " pod="openstack/dnsmasq-dns-66fbd85b65-nz6s7" Dec 13 03:29:28 crc kubenswrapper[5070]: I1213 03:29:28.919688 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-ds4v5"] Dec 13 03:29:28 crc kubenswrapper[5070]: I1213 03:29:28.920912 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-ds4v5" Dec 13 03:29:28 crc kubenswrapper[5070]: I1213 03:29:28.923808 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 13 03:29:28 crc kubenswrapper[5070]: I1213 03:29:28.924180 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-g4vc4" Dec 13 03:29:28 crc kubenswrapper[5070]: I1213 03:29:28.925413 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 13 03:29:28 crc kubenswrapper[5070]: I1213 03:29:28.925658 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 13 03:29:28 crc kubenswrapper[5070]: I1213 03:29:28.927412 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-ds4v5"] Dec 13 03:29:28 crc kubenswrapper[5070]: I1213 03:29:28.976192 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/0decb492-0fba-4ae9-bd55-e9d82d77550c-fernet-keys\") pod \"keystone-bootstrap-ds4v5\" (UID: \"0decb492-0fba-4ae9-bd55-e9d82d77550c\") " pod="openstack/keystone-bootstrap-ds4v5" Dec 13 03:29:28 crc kubenswrapper[5070]: I1213 03:29:28.976291 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mdhcm\" (UniqueName: \"kubernetes.io/projected/c966758b-d039-45a2-a244-1be0ce984b8f-kube-api-access-mdhcm\") pod \"dnsmasq-dns-66fbd85b65-nz6s7\" (UID: \"c966758b-d039-45a2-a244-1be0ce984b8f\") " pod="openstack/dnsmasq-dns-66fbd85b65-nz6s7" Dec 13 03:29:28 crc kubenswrapper[5070]: I1213 03:29:28.976353 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/0decb492-0fba-4ae9-bd55-e9d82d77550c-credential-keys\") pod \"keystone-bootstrap-ds4v5\" (UID: \"0decb492-0fba-4ae9-bd55-e9d82d77550c\") " pod="openstack/keystone-bootstrap-ds4v5" Dec 13 03:29:28 crc kubenswrapper[5070]: I1213 03:29:28.976398 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c966758b-d039-45a2-a244-1be0ce984b8f-config\") pod \"dnsmasq-dns-66fbd85b65-nz6s7\" (UID: \"c966758b-d039-45a2-a244-1be0ce984b8f\") " pod="openstack/dnsmasq-dns-66fbd85b65-nz6s7" Dec 13 03:29:28 crc kubenswrapper[5070]: I1213 03:29:28.976451 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c966758b-d039-45a2-a244-1be0ce984b8f-ovsdbserver-nb\") pod \"dnsmasq-dns-66fbd85b65-nz6s7\" (UID: \"c966758b-d039-45a2-a244-1be0ce984b8f\") " pod="openstack/dnsmasq-dns-66fbd85b65-nz6s7" Dec 13 03:29:28 crc kubenswrapper[5070]: I1213 03:29:28.976481 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0decb492-0fba-4ae9-bd55-e9d82d77550c-config-data\") pod \"keystone-bootstrap-ds4v5\" (UID: \"0decb492-0fba-4ae9-bd55-e9d82d77550c\") " pod="openstack/keystone-bootstrap-ds4v5" Dec 13 03:29:28 crc kubenswrapper[5070]: I1213 03:29:28.976514 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0decb492-0fba-4ae9-bd55-e9d82d77550c-combined-ca-bundle\") pod \"keystone-bootstrap-ds4v5\" (UID: 
\"0decb492-0fba-4ae9-bd55-e9d82d77550c\") " pod="openstack/keystone-bootstrap-ds4v5" Dec 13 03:29:28 crc kubenswrapper[5070]: I1213 03:29:28.976636 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0decb492-0fba-4ae9-bd55-e9d82d77550c-scripts\") pod \"keystone-bootstrap-ds4v5\" (UID: \"0decb492-0fba-4ae9-bd55-e9d82d77550c\") " pod="openstack/keystone-bootstrap-ds4v5" Dec 13 03:29:28 crc kubenswrapper[5070]: I1213 03:29:28.976720 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gxm7n\" (UniqueName: \"kubernetes.io/projected/0decb492-0fba-4ae9-bd55-e9d82d77550c-kube-api-access-gxm7n\") pod \"keystone-bootstrap-ds4v5\" (UID: \"0decb492-0fba-4ae9-bd55-e9d82d77550c\") " pod="openstack/keystone-bootstrap-ds4v5" Dec 13 03:29:28 crc kubenswrapper[5070]: I1213 03:29:28.976751 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c966758b-d039-45a2-a244-1be0ce984b8f-dns-svc\") pod \"dnsmasq-dns-66fbd85b65-nz6s7\" (UID: \"c966758b-d039-45a2-a244-1be0ce984b8f\") " pod="openstack/dnsmasq-dns-66fbd85b65-nz6s7" Dec 13 03:29:28 crc kubenswrapper[5070]: I1213 03:29:28.976796 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c966758b-d039-45a2-a244-1be0ce984b8f-ovsdbserver-sb\") pod \"dnsmasq-dns-66fbd85b65-nz6s7\" (UID: \"c966758b-d039-45a2-a244-1be0ce984b8f\") " pod="openstack/dnsmasq-dns-66fbd85b65-nz6s7" Dec 13 03:29:28 crc kubenswrapper[5070]: I1213 03:29:28.977434 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c966758b-d039-45a2-a244-1be0ce984b8f-ovsdbserver-nb\") pod \"dnsmasq-dns-66fbd85b65-nz6s7\" (UID: \"c966758b-d039-45a2-a244-1be0ce984b8f\") " pod="openstack/dnsmasq-dns-66fbd85b65-nz6s7" Dec 13 03:29:28 crc kubenswrapper[5070]: I1213 03:29:28.977733 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c966758b-d039-45a2-a244-1be0ce984b8f-dns-svc\") pod \"dnsmasq-dns-66fbd85b65-nz6s7\" (UID: \"c966758b-d039-45a2-a244-1be0ce984b8f\") " pod="openstack/dnsmasq-dns-66fbd85b65-nz6s7" Dec 13 03:29:28 crc kubenswrapper[5070]: I1213 03:29:28.977765 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c966758b-d039-45a2-a244-1be0ce984b8f-ovsdbserver-sb\") pod \"dnsmasq-dns-66fbd85b65-nz6s7\" (UID: \"c966758b-d039-45a2-a244-1be0ce984b8f\") " pod="openstack/dnsmasq-dns-66fbd85b65-nz6s7" Dec 13 03:29:28 crc kubenswrapper[5070]: I1213 03:29:28.978089 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c966758b-d039-45a2-a244-1be0ce984b8f-config\") pod \"dnsmasq-dns-66fbd85b65-nz6s7\" (UID: \"c966758b-d039-45a2-a244-1be0ce984b8f\") " pod="openstack/dnsmasq-dns-66fbd85b65-nz6s7" Dec 13 03:29:29 crc kubenswrapper[5070]: I1213 03:29:29.008286 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mdhcm\" (UniqueName: \"kubernetes.io/projected/c966758b-d039-45a2-a244-1be0ce984b8f-kube-api-access-mdhcm\") pod \"dnsmasq-dns-66fbd85b65-nz6s7\" (UID: \"c966758b-d039-45a2-a244-1be0ce984b8f\") " pod="openstack/dnsmasq-dns-66fbd85b65-nz6s7" Dec 13 03:29:29 crc 
kubenswrapper[5070]: I1213 03:29:29.063776 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 13 03:29:29 crc kubenswrapper[5070]: I1213 03:29:29.065497 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 13 03:29:29 crc kubenswrapper[5070]: I1213 03:29:29.070271 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 13 03:29:29 crc kubenswrapper[5070]: I1213 03:29:29.070296 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 13 03:29:29 crc kubenswrapper[5070]: I1213 03:29:29.077769 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0decb492-0fba-4ae9-bd55-e9d82d77550c-config-data\") pod \"keystone-bootstrap-ds4v5\" (UID: \"0decb492-0fba-4ae9-bd55-e9d82d77550c\") " pod="openstack/keystone-bootstrap-ds4v5" Dec 13 03:29:29 crc kubenswrapper[5070]: I1213 03:29:29.077820 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0decb492-0fba-4ae9-bd55-e9d82d77550c-combined-ca-bundle\") pod \"keystone-bootstrap-ds4v5\" (UID: \"0decb492-0fba-4ae9-bd55-e9d82d77550c\") " pod="openstack/keystone-bootstrap-ds4v5" Dec 13 03:29:29 crc kubenswrapper[5070]: I1213 03:29:29.077852 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0decb492-0fba-4ae9-bd55-e9d82d77550c-scripts\") pod \"keystone-bootstrap-ds4v5\" (UID: \"0decb492-0fba-4ae9-bd55-e9d82d77550c\") " pod="openstack/keystone-bootstrap-ds4v5" Dec 13 03:29:29 crc kubenswrapper[5070]: I1213 03:29:29.077887 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gxm7n\" (UniqueName: \"kubernetes.io/projected/0decb492-0fba-4ae9-bd55-e9d82d77550c-kube-api-access-gxm7n\") pod \"keystone-bootstrap-ds4v5\" (UID: \"0decb492-0fba-4ae9-bd55-e9d82d77550c\") " pod="openstack/keystone-bootstrap-ds4v5" Dec 13 03:29:29 crc kubenswrapper[5070]: I1213 03:29:29.077926 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/0decb492-0fba-4ae9-bd55-e9d82d77550c-fernet-keys\") pod \"keystone-bootstrap-ds4v5\" (UID: \"0decb492-0fba-4ae9-bd55-e9d82d77550c\") " pod="openstack/keystone-bootstrap-ds4v5" Dec 13 03:29:29 crc kubenswrapper[5070]: I1213 03:29:29.077961 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/0decb492-0fba-4ae9-bd55-e9d82d77550c-credential-keys\") pod \"keystone-bootstrap-ds4v5\" (UID: \"0decb492-0fba-4ae9-bd55-e9d82d77550c\") " pod="openstack/keystone-bootstrap-ds4v5" Dec 13 03:29:29 crc kubenswrapper[5070]: I1213 03:29:29.083064 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/0decb492-0fba-4ae9-bd55-e9d82d77550c-fernet-keys\") pod \"keystone-bootstrap-ds4v5\" (UID: \"0decb492-0fba-4ae9-bd55-e9d82d77550c\") " pod="openstack/keystone-bootstrap-ds4v5" Dec 13 03:29:29 crc kubenswrapper[5070]: I1213 03:29:29.083211 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0decb492-0fba-4ae9-bd55-e9d82d77550c-config-data\") pod \"keystone-bootstrap-ds4v5\" (UID: 
\"0decb492-0fba-4ae9-bd55-e9d82d77550c\") " pod="openstack/keystone-bootstrap-ds4v5" Dec 13 03:29:29 crc kubenswrapper[5070]: I1213 03:29:29.083326 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/0decb492-0fba-4ae9-bd55-e9d82d77550c-credential-keys\") pod \"keystone-bootstrap-ds4v5\" (UID: \"0decb492-0fba-4ae9-bd55-e9d82d77550c\") " pod="openstack/keystone-bootstrap-ds4v5" Dec 13 03:29:29 crc kubenswrapper[5070]: I1213 03:29:29.083711 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0decb492-0fba-4ae9-bd55-e9d82d77550c-scripts\") pod \"keystone-bootstrap-ds4v5\" (UID: \"0decb492-0fba-4ae9-bd55-e9d82d77550c\") " pod="openstack/keystone-bootstrap-ds4v5" Dec 13 03:29:29 crc kubenswrapper[5070]: I1213 03:29:29.084346 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0decb492-0fba-4ae9-bd55-e9d82d77550c-combined-ca-bundle\") pod \"keystone-bootstrap-ds4v5\" (UID: \"0decb492-0fba-4ae9-bd55-e9d82d77550c\") " pod="openstack/keystone-bootstrap-ds4v5" Dec 13 03:29:29 crc kubenswrapper[5070]: I1213 03:29:29.103697 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 13 03:29:29 crc kubenswrapper[5070]: I1213 03:29:29.109378 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gxm7n\" (UniqueName: \"kubernetes.io/projected/0decb492-0fba-4ae9-bd55-e9d82d77550c-kube-api-access-gxm7n\") pod \"keystone-bootstrap-ds4v5\" (UID: \"0decb492-0fba-4ae9-bd55-e9d82d77550c\") " pod="openstack/keystone-bootstrap-ds4v5" Dec 13 03:29:29 crc kubenswrapper[5070]: I1213 03:29:29.179014 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-66fbd85b65-nz6s7" Dec 13 03:29:29 crc kubenswrapper[5070]: I1213 03:29:29.179471 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/62471d23-7b26-439a-9a26-e65abe4be2c1-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"62471d23-7b26-439a-9a26-e65abe4be2c1\") " pod="openstack/ceilometer-0" Dec 13 03:29:29 crc kubenswrapper[5070]: I1213 03:29:29.179528 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/62471d23-7b26-439a-9a26-e65abe4be2c1-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"62471d23-7b26-439a-9a26-e65abe4be2c1\") " pod="openstack/ceilometer-0" Dec 13 03:29:29 crc kubenswrapper[5070]: I1213 03:29:29.179615 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/62471d23-7b26-439a-9a26-e65abe4be2c1-log-httpd\") pod \"ceilometer-0\" (UID: \"62471d23-7b26-439a-9a26-e65abe4be2c1\") " pod="openstack/ceilometer-0" Dec 13 03:29:29 crc kubenswrapper[5070]: I1213 03:29:29.179658 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-729cc\" (UniqueName: \"kubernetes.io/projected/62471d23-7b26-439a-9a26-e65abe4be2c1-kube-api-access-729cc\") pod \"ceilometer-0\" (UID: \"62471d23-7b26-439a-9a26-e65abe4be2c1\") " pod="openstack/ceilometer-0" Dec 13 03:29:29 crc kubenswrapper[5070]: I1213 03:29:29.179761 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/62471d23-7b26-439a-9a26-e65abe4be2c1-config-data\") pod \"ceilometer-0\" (UID: \"62471d23-7b26-439a-9a26-e65abe4be2c1\") " pod="openstack/ceilometer-0" Dec 13 03:29:29 crc kubenswrapper[5070]: I1213 03:29:29.179814 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/62471d23-7b26-439a-9a26-e65abe4be2c1-run-httpd\") pod \"ceilometer-0\" (UID: \"62471d23-7b26-439a-9a26-e65abe4be2c1\") " pod="openstack/ceilometer-0" Dec 13 03:29:29 crc kubenswrapper[5070]: I1213 03:29:29.179835 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/62471d23-7b26-439a-9a26-e65abe4be2c1-scripts\") pod \"ceilometer-0\" (UID: \"62471d23-7b26-439a-9a26-e65abe4be2c1\") " pod="openstack/ceilometer-0" Dec 13 03:29:29 crc kubenswrapper[5070]: I1213 03:29:29.211174 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-sync-z9bn5"] Dec 13 03:29:29 crc kubenswrapper[5070]: I1213 03:29:29.220244 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-z9bn5" Dec 13 03:29:29 crc kubenswrapper[5070]: I1213 03:29:29.223894 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Dec 13 03:29:29 crc kubenswrapper[5070]: I1213 03:29:29.224088 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Dec 13 03:29:29 crc kubenswrapper[5070]: I1213 03:29:29.224256 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-74r9b" Dec 13 03:29:29 crc kubenswrapper[5070]: I1213 03:29:29.226876 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-z9bn5"] Dec 13 03:29:29 crc kubenswrapper[5070]: I1213 03:29:29.239294 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-ds4v5" Dec 13 03:29:29 crc kubenswrapper[5070]: I1213 03:29:29.251309 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-66fbd85b65-nz6s7"] Dec 13 03:29:29 crc kubenswrapper[5070]: I1213 03:29:29.297691 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/62471d23-7b26-439a-9a26-e65abe4be2c1-run-httpd\") pod \"ceilometer-0\" (UID: \"62471d23-7b26-439a-9a26-e65abe4be2c1\") " pod="openstack/ceilometer-0" Dec 13 03:29:29 crc kubenswrapper[5070]: I1213 03:29:29.301702 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/62471d23-7b26-439a-9a26-e65abe4be2c1-run-httpd\") pod \"ceilometer-0\" (UID: \"62471d23-7b26-439a-9a26-e65abe4be2c1\") " pod="openstack/ceilometer-0" Dec 13 03:29:29 crc kubenswrapper[5070]: I1213 03:29:29.316774 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6bf59f66bf-pq5nh"] Dec 13 03:29:29 crc kubenswrapper[5070]: I1213 03:29:29.332059 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/62471d23-7b26-439a-9a26-e65abe4be2c1-scripts\") pod \"ceilometer-0\" (UID: \"62471d23-7b26-439a-9a26-e65abe4be2c1\") " pod="openstack/ceilometer-0" Dec 13 03:29:29 crc kubenswrapper[5070]: I1213 03:29:29.334045 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9abbaf32-db3e-4169-8159-6d707cdf3e2a-scripts\") pod \"placement-db-sync-z9bn5\" (UID: \"9abbaf32-db3e-4169-8159-6d707cdf3e2a\") " pod="openstack/placement-db-sync-z9bn5" Dec 13 03:29:29 crc kubenswrapper[5070]: I1213 03:29:29.334595 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/62471d23-7b26-439a-9a26-e65abe4be2c1-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"62471d23-7b26-439a-9a26-e65abe4be2c1\") " pod="openstack/ceilometer-0" Dec 13 03:29:29 crc kubenswrapper[5070]: I1213 03:29:29.334751 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/62471d23-7b26-439a-9a26-e65abe4be2c1-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"62471d23-7b26-439a-9a26-e65abe4be2c1\") " pod="openstack/ceilometer-0" Dec 13 03:29:29 crc kubenswrapper[5070]: I1213 03:29:29.335339 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" 
(UniqueName: \"kubernetes.io/secret/9abbaf32-db3e-4169-8159-6d707cdf3e2a-combined-ca-bundle\") pod \"placement-db-sync-z9bn5\" (UID: \"9abbaf32-db3e-4169-8159-6d707cdf3e2a\") " pod="openstack/placement-db-sync-z9bn5" Dec 13 03:29:29 crc kubenswrapper[5070]: I1213 03:29:29.335526 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9abbaf32-db3e-4169-8159-6d707cdf3e2a-config-data\") pod \"placement-db-sync-z9bn5\" (UID: \"9abbaf32-db3e-4169-8159-6d707cdf3e2a\") " pod="openstack/placement-db-sync-z9bn5" Dec 13 03:29:29 crc kubenswrapper[5070]: I1213 03:29:29.336123 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/62471d23-7b26-439a-9a26-e65abe4be2c1-log-httpd\") pod \"ceilometer-0\" (UID: \"62471d23-7b26-439a-9a26-e65abe4be2c1\") " pod="openstack/ceilometer-0" Dec 13 03:29:29 crc kubenswrapper[5070]: I1213 03:29:29.337688 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9abbaf32-db3e-4169-8159-6d707cdf3e2a-logs\") pod \"placement-db-sync-z9bn5\" (UID: \"9abbaf32-db3e-4169-8159-6d707cdf3e2a\") " pod="openstack/placement-db-sync-z9bn5" Dec 13 03:29:29 crc kubenswrapper[5070]: I1213 03:29:29.337796 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-729cc\" (UniqueName: \"kubernetes.io/projected/62471d23-7b26-439a-9a26-e65abe4be2c1-kube-api-access-729cc\") pod \"ceilometer-0\" (UID: \"62471d23-7b26-439a-9a26-e65abe4be2c1\") " pod="openstack/ceilometer-0" Dec 13 03:29:29 crc kubenswrapper[5070]: I1213 03:29:29.337866 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vzd4x\" (UniqueName: \"kubernetes.io/projected/9abbaf32-db3e-4169-8159-6d707cdf3e2a-kube-api-access-vzd4x\") pod \"placement-db-sync-z9bn5\" (UID: \"9abbaf32-db3e-4169-8159-6d707cdf3e2a\") " pod="openstack/placement-db-sync-z9bn5" Dec 13 03:29:29 crc kubenswrapper[5070]: I1213 03:29:29.337945 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/62471d23-7b26-439a-9a26-e65abe4be2c1-config-data\") pod \"ceilometer-0\" (UID: \"62471d23-7b26-439a-9a26-e65abe4be2c1\") " pod="openstack/ceilometer-0" Dec 13 03:29:29 crc kubenswrapper[5070]: I1213 03:29:29.341411 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6bf59f66bf-pq5nh" Dec 13 03:29:29 crc kubenswrapper[5070]: I1213 03:29:29.343502 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6bf59f66bf-pq5nh"] Dec 13 03:29:29 crc kubenswrapper[5070]: I1213 03:29:29.344156 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/62471d23-7b26-439a-9a26-e65abe4be2c1-log-httpd\") pod \"ceilometer-0\" (UID: \"62471d23-7b26-439a-9a26-e65abe4be2c1\") " pod="openstack/ceilometer-0" Dec 13 03:29:29 crc kubenswrapper[5070]: I1213 03:29:29.373827 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/62471d23-7b26-439a-9a26-e65abe4be2c1-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"62471d23-7b26-439a-9a26-e65abe4be2c1\") " pod="openstack/ceilometer-0" Dec 13 03:29:29 crc kubenswrapper[5070]: I1213 03:29:29.373978 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/62471d23-7b26-439a-9a26-e65abe4be2c1-config-data\") pod \"ceilometer-0\" (UID: \"62471d23-7b26-439a-9a26-e65abe4be2c1\") " pod="openstack/ceilometer-0" Dec 13 03:29:29 crc kubenswrapper[5070]: I1213 03:29:29.375224 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/62471d23-7b26-439a-9a26-e65abe4be2c1-scripts\") pod \"ceilometer-0\" (UID: \"62471d23-7b26-439a-9a26-e65abe4be2c1\") " pod="openstack/ceilometer-0" Dec 13 03:29:29 crc kubenswrapper[5070]: I1213 03:29:29.385806 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/62471d23-7b26-439a-9a26-e65abe4be2c1-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"62471d23-7b26-439a-9a26-e65abe4be2c1\") " pod="openstack/ceilometer-0" Dec 13 03:29:29 crc kubenswrapper[5070]: I1213 03:29:29.387059 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-729cc\" (UniqueName: \"kubernetes.io/projected/62471d23-7b26-439a-9a26-e65abe4be2c1-kube-api-access-729cc\") pod \"ceilometer-0\" (UID: \"62471d23-7b26-439a-9a26-e65abe4be2c1\") " pod="openstack/ceilometer-0" Dec 13 03:29:29 crc kubenswrapper[5070]: I1213 03:29:29.459221 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d105fc92-98ef-4041-ae25-513512416174-config\") pod \"dnsmasq-dns-6bf59f66bf-pq5nh\" (UID: \"d105fc92-98ef-4041-ae25-513512416174\") " pod="openstack/dnsmasq-dns-6bf59f66bf-pq5nh" Dec 13 03:29:29 crc kubenswrapper[5070]: I1213 03:29:29.459263 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d105fc92-98ef-4041-ae25-513512416174-ovsdbserver-sb\") pod \"dnsmasq-dns-6bf59f66bf-pq5nh\" (UID: \"d105fc92-98ef-4041-ae25-513512416174\") " pod="openstack/dnsmasq-dns-6bf59f66bf-pq5nh" Dec 13 03:29:29 crc kubenswrapper[5070]: I1213 03:29:29.459327 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9abbaf32-db3e-4169-8159-6d707cdf3e2a-combined-ca-bundle\") pod \"placement-db-sync-z9bn5\" (UID: \"9abbaf32-db3e-4169-8159-6d707cdf3e2a\") " pod="openstack/placement-db-sync-z9bn5" Dec 13 03:29:29 crc kubenswrapper[5070]: I1213 
03:29:29.459350 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9abbaf32-db3e-4169-8159-6d707cdf3e2a-config-data\") pod \"placement-db-sync-z9bn5\" (UID: \"9abbaf32-db3e-4169-8159-6d707cdf3e2a\") " pod="openstack/placement-db-sync-z9bn5" Dec 13 03:29:29 crc kubenswrapper[5070]: I1213 03:29:29.459388 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9abbaf32-db3e-4169-8159-6d707cdf3e2a-logs\") pod \"placement-db-sync-z9bn5\" (UID: \"9abbaf32-db3e-4169-8159-6d707cdf3e2a\") " pod="openstack/placement-db-sync-z9bn5" Dec 13 03:29:29 crc kubenswrapper[5070]: I1213 03:29:29.459422 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d105fc92-98ef-4041-ae25-513512416174-dns-svc\") pod \"dnsmasq-dns-6bf59f66bf-pq5nh\" (UID: \"d105fc92-98ef-4041-ae25-513512416174\") " pod="openstack/dnsmasq-dns-6bf59f66bf-pq5nh" Dec 13 03:29:29 crc kubenswrapper[5070]: I1213 03:29:29.459471 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rnpq7\" (UniqueName: \"kubernetes.io/projected/d105fc92-98ef-4041-ae25-513512416174-kube-api-access-rnpq7\") pod \"dnsmasq-dns-6bf59f66bf-pq5nh\" (UID: \"d105fc92-98ef-4041-ae25-513512416174\") " pod="openstack/dnsmasq-dns-6bf59f66bf-pq5nh" Dec 13 03:29:29 crc kubenswrapper[5070]: I1213 03:29:29.459494 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vzd4x\" (UniqueName: \"kubernetes.io/projected/9abbaf32-db3e-4169-8159-6d707cdf3e2a-kube-api-access-vzd4x\") pod \"placement-db-sync-z9bn5\" (UID: \"9abbaf32-db3e-4169-8159-6d707cdf3e2a\") " pod="openstack/placement-db-sync-z9bn5" Dec 13 03:29:29 crc kubenswrapper[5070]: I1213 03:29:29.459556 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9abbaf32-db3e-4169-8159-6d707cdf3e2a-scripts\") pod \"placement-db-sync-z9bn5\" (UID: \"9abbaf32-db3e-4169-8159-6d707cdf3e2a\") " pod="openstack/placement-db-sync-z9bn5" Dec 13 03:29:29 crc kubenswrapper[5070]: I1213 03:29:29.459594 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d105fc92-98ef-4041-ae25-513512416174-ovsdbserver-nb\") pod \"dnsmasq-dns-6bf59f66bf-pq5nh\" (UID: \"d105fc92-98ef-4041-ae25-513512416174\") " pod="openstack/dnsmasq-dns-6bf59f66bf-pq5nh" Dec 13 03:29:29 crc kubenswrapper[5070]: I1213 03:29:29.477037 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9abbaf32-db3e-4169-8159-6d707cdf3e2a-logs\") pod \"placement-db-sync-z9bn5\" (UID: \"9abbaf32-db3e-4169-8159-6d707cdf3e2a\") " pod="openstack/placement-db-sync-z9bn5" Dec 13 03:29:29 crc kubenswrapper[5070]: I1213 03:29:29.488970 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9abbaf32-db3e-4169-8159-6d707cdf3e2a-combined-ca-bundle\") pod \"placement-db-sync-z9bn5\" (UID: \"9abbaf32-db3e-4169-8159-6d707cdf3e2a\") " pod="openstack/placement-db-sync-z9bn5" Dec 13 03:29:29 crc kubenswrapper[5070]: I1213 03:29:29.490812 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/9abbaf32-db3e-4169-8159-6d707cdf3e2a-scripts\") pod \"placement-db-sync-z9bn5\" (UID: \"9abbaf32-db3e-4169-8159-6d707cdf3e2a\") " pod="openstack/placement-db-sync-z9bn5" Dec 13 03:29:29 crc kubenswrapper[5070]: I1213 03:29:29.500996 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9abbaf32-db3e-4169-8159-6d707cdf3e2a-config-data\") pod \"placement-db-sync-z9bn5\" (UID: \"9abbaf32-db3e-4169-8159-6d707cdf3e2a\") " pod="openstack/placement-db-sync-z9bn5" Dec 13 03:29:29 crc kubenswrapper[5070]: I1213 03:29:29.508074 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vzd4x\" (UniqueName: \"kubernetes.io/projected/9abbaf32-db3e-4169-8159-6d707cdf3e2a-kube-api-access-vzd4x\") pod \"placement-db-sync-z9bn5\" (UID: \"9abbaf32-db3e-4169-8159-6d707cdf3e2a\") " pod="openstack/placement-db-sync-z9bn5" Dec 13 03:29:29 crc kubenswrapper[5070]: I1213 03:29:29.560716 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d105fc92-98ef-4041-ae25-513512416174-ovsdbserver-sb\") pod \"dnsmasq-dns-6bf59f66bf-pq5nh\" (UID: \"d105fc92-98ef-4041-ae25-513512416174\") " pod="openstack/dnsmasq-dns-6bf59f66bf-pq5nh" Dec 13 03:29:29 crc kubenswrapper[5070]: I1213 03:29:29.560809 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d105fc92-98ef-4041-ae25-513512416174-dns-svc\") pod \"dnsmasq-dns-6bf59f66bf-pq5nh\" (UID: \"d105fc92-98ef-4041-ae25-513512416174\") " pod="openstack/dnsmasq-dns-6bf59f66bf-pq5nh" Dec 13 03:29:29 crc kubenswrapper[5070]: I1213 03:29:29.560838 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rnpq7\" (UniqueName: \"kubernetes.io/projected/d105fc92-98ef-4041-ae25-513512416174-kube-api-access-rnpq7\") pod \"dnsmasq-dns-6bf59f66bf-pq5nh\" (UID: \"d105fc92-98ef-4041-ae25-513512416174\") " pod="openstack/dnsmasq-dns-6bf59f66bf-pq5nh" Dec 13 03:29:29 crc kubenswrapper[5070]: I1213 03:29:29.560900 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d105fc92-98ef-4041-ae25-513512416174-ovsdbserver-nb\") pod \"dnsmasq-dns-6bf59f66bf-pq5nh\" (UID: \"d105fc92-98ef-4041-ae25-513512416174\") " pod="openstack/dnsmasq-dns-6bf59f66bf-pq5nh" Dec 13 03:29:29 crc kubenswrapper[5070]: I1213 03:29:29.560930 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d105fc92-98ef-4041-ae25-513512416174-config\") pod \"dnsmasq-dns-6bf59f66bf-pq5nh\" (UID: \"d105fc92-98ef-4041-ae25-513512416174\") " pod="openstack/dnsmasq-dns-6bf59f66bf-pq5nh" Dec 13 03:29:29 crc kubenswrapper[5070]: I1213 03:29:29.562500 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d105fc92-98ef-4041-ae25-513512416174-ovsdbserver-sb\") pod \"dnsmasq-dns-6bf59f66bf-pq5nh\" (UID: \"d105fc92-98ef-4041-ae25-513512416174\") " pod="openstack/dnsmasq-dns-6bf59f66bf-pq5nh" Dec 13 03:29:29 crc kubenswrapper[5070]: I1213 03:29:29.562661 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d105fc92-98ef-4041-ae25-513512416174-dns-svc\") pod \"dnsmasq-dns-6bf59f66bf-pq5nh\" (UID: 
\"d105fc92-98ef-4041-ae25-513512416174\") " pod="openstack/dnsmasq-dns-6bf59f66bf-pq5nh" Dec 13 03:29:29 crc kubenswrapper[5070]: I1213 03:29:29.562895 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d105fc92-98ef-4041-ae25-513512416174-ovsdbserver-nb\") pod \"dnsmasq-dns-6bf59f66bf-pq5nh\" (UID: \"d105fc92-98ef-4041-ae25-513512416174\") " pod="openstack/dnsmasq-dns-6bf59f66bf-pq5nh" Dec 13 03:29:29 crc kubenswrapper[5070]: I1213 03:29:29.562904 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d105fc92-98ef-4041-ae25-513512416174-config\") pod \"dnsmasq-dns-6bf59f66bf-pq5nh\" (UID: \"d105fc92-98ef-4041-ae25-513512416174\") " pod="openstack/dnsmasq-dns-6bf59f66bf-pq5nh" Dec 13 03:29:29 crc kubenswrapper[5070]: I1213 03:29:29.571117 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-z9bn5" Dec 13 03:29:29 crc kubenswrapper[5070]: I1213 03:29:29.584366 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rnpq7\" (UniqueName: \"kubernetes.io/projected/d105fc92-98ef-4041-ae25-513512416174-kube-api-access-rnpq7\") pod \"dnsmasq-dns-6bf59f66bf-pq5nh\" (UID: \"d105fc92-98ef-4041-ae25-513512416174\") " pod="openstack/dnsmasq-dns-6bf59f66bf-pq5nh" Dec 13 03:29:29 crc kubenswrapper[5070]: I1213 03:29:29.585051 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-66fbd85b65-nz6s7"] Dec 13 03:29:29 crc kubenswrapper[5070]: I1213 03:29:29.686779 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 13 03:29:29 crc kubenswrapper[5070]: I1213 03:29:29.782068 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6bf59f66bf-pq5nh" Dec 13 03:29:29 crc kubenswrapper[5070]: I1213 03:29:29.891186 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-ds4v5"] Dec 13 03:29:30 crc kubenswrapper[5070]: I1213 03:29:30.364707 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-z9bn5"] Dec 13 03:29:30 crc kubenswrapper[5070]: W1213 03:29:30.366754 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9abbaf32_db3e_4169_8159_6d707cdf3e2a.slice/crio-4ad6081c77a0327e0b1639adad1a127aae85d1598506dd59182a6e3b3c1488ff WatchSource:0}: Error finding container 4ad6081c77a0327e0b1639adad1a127aae85d1598506dd59182a6e3b3c1488ff: Status 404 returned error can't find the container with id 4ad6081c77a0327e0b1639adad1a127aae85d1598506dd59182a6e3b3c1488ff Dec 13 03:29:30 crc kubenswrapper[5070]: I1213 03:29:30.489782 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 13 03:29:30 crc kubenswrapper[5070]: W1213 03:29:30.491731 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod62471d23_7b26_439a_9a26_e65abe4be2c1.slice/crio-cfd54d3ab9e394543f50fd49ff7def0bd1f45003063c3275399f7d3559d97134 WatchSource:0}: Error finding container cfd54d3ab9e394543f50fd49ff7def0bd1f45003063c3275399f7d3559d97134: Status 404 returned error can't find the container with id cfd54d3ab9e394543f50fd49ff7def0bd1f45003063c3275399f7d3559d97134 Dec 13 03:29:30 crc kubenswrapper[5070]: I1213 03:29:30.552654 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6bf59f66bf-pq5nh"] Dec 13 03:29:30 crc kubenswrapper[5070]: I1213 03:29:30.585125 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bf59f66bf-pq5nh" event={"ID":"d105fc92-98ef-4041-ae25-513512416174","Type":"ContainerStarted","Data":"4160930aaccdfc1497be7a036128b6444434ca669bd7c8749524bcb482d05281"} Dec 13 03:29:30 crc kubenswrapper[5070]: I1213 03:29:30.588491 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-66fbd85b65-nz6s7" event={"ID":"c966758b-d039-45a2-a244-1be0ce984b8f","Type":"ContainerStarted","Data":"83cbad72a1308cb643bc2ec8ad8c788fc0b1ddd88eed7cecda0e89feedf14514"} Dec 13 03:29:30 crc kubenswrapper[5070]: I1213 03:29:30.590494 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"62471d23-7b26-439a-9a26-e65abe4be2c1","Type":"ContainerStarted","Data":"cfd54d3ab9e394543f50fd49ff7def0bd1f45003063c3275399f7d3559d97134"} Dec 13 03:29:30 crc kubenswrapper[5070]: I1213 03:29:30.592195 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-z9bn5" event={"ID":"9abbaf32-db3e-4169-8159-6d707cdf3e2a","Type":"ContainerStarted","Data":"4ad6081c77a0327e0b1639adad1a127aae85d1598506dd59182a6e3b3c1488ff"} Dec 13 03:29:30 crc kubenswrapper[5070]: I1213 03:29:30.595155 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-ds4v5" event={"ID":"0decb492-0fba-4ae9-bd55-e9d82d77550c","Type":"ContainerStarted","Data":"9bfb97dc6dccee08c7cb8d490d5aae2d92df83877d27a778aacb040c16ebefdb"} Dec 13 03:29:31 crc kubenswrapper[5070]: I1213 03:29:31.192055 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 13 03:29:32 crc kubenswrapper[5070]: I1213 03:29:32.610985 5070 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-66fbd85b65-nz6s7" event={"ID":"c966758b-d039-45a2-a244-1be0ce984b8f","Type":"ContainerStarted","Data":"5893b9a87bdd3586be15cbd7603c3ac5d154c4e9694f933ce0ae011f3a4712ed"} Dec 13 03:29:33 crc kubenswrapper[5070]: I1213 03:29:33.621057 5070 generic.go:334] "Generic (PLEG): container finished" podID="c966758b-d039-45a2-a244-1be0ce984b8f" containerID="5893b9a87bdd3586be15cbd7603c3ac5d154c4e9694f933ce0ae011f3a4712ed" exitCode=0 Dec 13 03:29:33 crc kubenswrapper[5070]: I1213 03:29:33.621222 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-66fbd85b65-nz6s7" event={"ID":"c966758b-d039-45a2-a244-1be0ce984b8f","Type":"ContainerDied","Data":"5893b9a87bdd3586be15cbd7603c3ac5d154c4e9694f933ce0ae011f3a4712ed"} Dec 13 03:29:33 crc kubenswrapper[5070]: I1213 03:29:33.624344 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-ds4v5" event={"ID":"0decb492-0fba-4ae9-bd55-e9d82d77550c","Type":"ContainerStarted","Data":"c97dc7d97309ad86b2bb1f11e2b677901fdcb60378be5f7fc0a07467eee18526"} Dec 13 03:29:33 crc kubenswrapper[5070]: I1213 03:29:33.681223 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-ds4v5" podStartSLOduration=5.681198535 podStartE2EDuration="5.681198535s" podCreationTimestamp="2025-12-13 03:29:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 03:29:33.668383146 +0000 UTC m=+1065.904226712" watchObservedRunningTime="2025-12-13 03:29:33.681198535 +0000 UTC m=+1065.917042091" Dec 13 03:29:33 crc kubenswrapper[5070]: I1213 03:29:33.955936 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-66fbd85b65-nz6s7" Dec 13 03:29:34 crc kubenswrapper[5070]: I1213 03:29:34.154071 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mdhcm\" (UniqueName: \"kubernetes.io/projected/c966758b-d039-45a2-a244-1be0ce984b8f-kube-api-access-mdhcm\") pod \"c966758b-d039-45a2-a244-1be0ce984b8f\" (UID: \"c966758b-d039-45a2-a244-1be0ce984b8f\") " Dec 13 03:29:34 crc kubenswrapper[5070]: I1213 03:29:34.154311 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c966758b-d039-45a2-a244-1be0ce984b8f-ovsdbserver-nb\") pod \"c966758b-d039-45a2-a244-1be0ce984b8f\" (UID: \"c966758b-d039-45a2-a244-1be0ce984b8f\") " Dec 13 03:29:34 crc kubenswrapper[5070]: I1213 03:29:34.154431 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c966758b-d039-45a2-a244-1be0ce984b8f-dns-svc\") pod \"c966758b-d039-45a2-a244-1be0ce984b8f\" (UID: \"c966758b-d039-45a2-a244-1be0ce984b8f\") " Dec 13 03:29:34 crc kubenswrapper[5070]: I1213 03:29:34.155239 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c966758b-d039-45a2-a244-1be0ce984b8f-ovsdbserver-sb\") pod \"c966758b-d039-45a2-a244-1be0ce984b8f\" (UID: \"c966758b-d039-45a2-a244-1be0ce984b8f\") " Dec 13 03:29:34 crc kubenswrapper[5070]: I1213 03:29:34.155298 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c966758b-d039-45a2-a244-1be0ce984b8f-config\") pod \"c966758b-d039-45a2-a244-1be0ce984b8f\" (UID: \"c966758b-d039-45a2-a244-1be0ce984b8f\") " Dec 13 03:29:34 crc kubenswrapper[5070]: I1213 03:29:34.160692 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c966758b-d039-45a2-a244-1be0ce984b8f-kube-api-access-mdhcm" (OuterVolumeSpecName: "kube-api-access-mdhcm") pod "c966758b-d039-45a2-a244-1be0ce984b8f" (UID: "c966758b-d039-45a2-a244-1be0ce984b8f"). InnerVolumeSpecName "kube-api-access-mdhcm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:29:34 crc kubenswrapper[5070]: I1213 03:29:34.179152 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c966758b-d039-45a2-a244-1be0ce984b8f-config" (OuterVolumeSpecName: "config") pod "c966758b-d039-45a2-a244-1be0ce984b8f" (UID: "c966758b-d039-45a2-a244-1be0ce984b8f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:29:34 crc kubenswrapper[5070]: I1213 03:29:34.184676 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c966758b-d039-45a2-a244-1be0ce984b8f-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "c966758b-d039-45a2-a244-1be0ce984b8f" (UID: "c966758b-d039-45a2-a244-1be0ce984b8f"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:29:34 crc kubenswrapper[5070]: I1213 03:29:34.193960 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c966758b-d039-45a2-a244-1be0ce984b8f-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "c966758b-d039-45a2-a244-1be0ce984b8f" (UID: "c966758b-d039-45a2-a244-1be0ce984b8f"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:29:34 crc kubenswrapper[5070]: I1213 03:29:34.208320 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c966758b-d039-45a2-a244-1be0ce984b8f-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "c966758b-d039-45a2-a244-1be0ce984b8f" (UID: "c966758b-d039-45a2-a244-1be0ce984b8f"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:29:34 crc kubenswrapper[5070]: I1213 03:29:34.259282 5070 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c966758b-d039-45a2-a244-1be0ce984b8f-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 13 03:29:34 crc kubenswrapper[5070]: I1213 03:29:34.259904 5070 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c966758b-d039-45a2-a244-1be0ce984b8f-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 13 03:29:34 crc kubenswrapper[5070]: I1213 03:29:34.259936 5070 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c966758b-d039-45a2-a244-1be0ce984b8f-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 13 03:29:34 crc kubenswrapper[5070]: I1213 03:29:34.259948 5070 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c966758b-d039-45a2-a244-1be0ce984b8f-config\") on node \"crc\" DevicePath \"\"" Dec 13 03:29:34 crc kubenswrapper[5070]: I1213 03:29:34.259961 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mdhcm\" (UniqueName: \"kubernetes.io/projected/c966758b-d039-45a2-a244-1be0ce984b8f-kube-api-access-mdhcm\") on node \"crc\" DevicePath \"\"" Dec 13 03:29:34 crc kubenswrapper[5070]: I1213 03:29:34.642978 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-66fbd85b65-nz6s7" Dec 13 03:29:34 crc kubenswrapper[5070]: I1213 03:29:34.647890 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-66fbd85b65-nz6s7" event={"ID":"c966758b-d039-45a2-a244-1be0ce984b8f","Type":"ContainerDied","Data":"83cbad72a1308cb643bc2ec8ad8c788fc0b1ddd88eed7cecda0e89feedf14514"} Dec 13 03:29:34 crc kubenswrapper[5070]: I1213 03:29:34.648008 5070 scope.go:117] "RemoveContainer" containerID="5893b9a87bdd3586be15cbd7603c3ac5d154c4e9694f933ce0ae011f3a4712ed" Dec 13 03:29:34 crc kubenswrapper[5070]: I1213 03:29:34.650794 5070 generic.go:334] "Generic (PLEG): container finished" podID="d105fc92-98ef-4041-ae25-513512416174" containerID="5ff07b4e848d86dcb8ccc8b855198be72e7092e5ea586e68a5bf5d06a7a27140" exitCode=0 Dec 13 03:29:34 crc kubenswrapper[5070]: I1213 03:29:34.651254 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bf59f66bf-pq5nh" event={"ID":"d105fc92-98ef-4041-ae25-513512416174","Type":"ContainerDied","Data":"5ff07b4e848d86dcb8ccc8b855198be72e7092e5ea586e68a5bf5d06a7a27140"} Dec 13 03:29:34 crc kubenswrapper[5070]: I1213 03:29:34.757800 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-66fbd85b65-nz6s7"] Dec 13 03:29:34 crc kubenswrapper[5070]: I1213 03:29:34.782784 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-66fbd85b65-nz6s7"] Dec 13 03:29:35 crc kubenswrapper[5070]: I1213 03:29:35.660136 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bf59f66bf-pq5nh" event={"ID":"d105fc92-98ef-4041-ae25-513512416174","Type":"ContainerStarted","Data":"dfd382ab00b5edd9946e24e887acec2e97fe23ab12613c99250754b132c359f5"} Dec 13 03:29:35 crc kubenswrapper[5070]: I1213 03:29:35.661742 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6bf59f66bf-pq5nh" Dec 13 03:29:35 crc kubenswrapper[5070]: I1213 03:29:35.666487 5070 generic.go:334] "Generic (PLEG): container finished" podID="682caa0d-3a80-41aa-a899-8613f8454481" containerID="6c655b199cd92d3e80f20e037439480eca6807c7d128932bc536906a328be95e" exitCode=0 Dec 13 03:29:35 crc kubenswrapper[5070]: I1213 03:29:35.666547 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-vv4wt" event={"ID":"682caa0d-3a80-41aa-a899-8613f8454481","Type":"ContainerDied","Data":"6c655b199cd92d3e80f20e037439480eca6807c7d128932bc536906a328be95e"} Dec 13 03:29:35 crc kubenswrapper[5070]: I1213 03:29:35.690103 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6bf59f66bf-pq5nh" podStartSLOduration=6.690086338 podStartE2EDuration="6.690086338s" podCreationTimestamp="2025-12-13 03:29:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 03:29:35.684308511 +0000 UTC m=+1067.920152067" watchObservedRunningTime="2025-12-13 03:29:35.690086338 +0000 UTC m=+1067.925929884" Dec 13 03:29:36 crc kubenswrapper[5070]: I1213 03:29:36.176971 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c966758b-d039-45a2-a244-1be0ce984b8f" path="/var/lib/kubelet/pods/c966758b-d039-45a2-a244-1be0ce984b8f/volumes" Dec 13 03:29:36 crc kubenswrapper[5070]: I1213 03:29:36.637010 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-c1b6-account-create-gls4t"] Dec 13 03:29:36 crc kubenswrapper[5070]: E1213 
03:29:36.637412 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c966758b-d039-45a2-a244-1be0ce984b8f" containerName="init" Dec 13 03:29:36 crc kubenswrapper[5070]: I1213 03:29:36.637435 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="c966758b-d039-45a2-a244-1be0ce984b8f" containerName="init" Dec 13 03:29:36 crc kubenswrapper[5070]: I1213 03:29:36.637659 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="c966758b-d039-45a2-a244-1be0ce984b8f" containerName="init" Dec 13 03:29:36 crc kubenswrapper[5070]: I1213 03:29:36.638292 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-c1b6-account-create-gls4t" Dec 13 03:29:36 crc kubenswrapper[5070]: I1213 03:29:36.640028 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret" Dec 13 03:29:36 crc kubenswrapper[5070]: I1213 03:29:36.647080 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-c1b6-account-create-gls4t"] Dec 13 03:29:36 crc kubenswrapper[5070]: I1213 03:29:36.802285 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5vjjz\" (UniqueName: \"kubernetes.io/projected/11bc40dc-12b2-46e3-ba04-85ff1fb6f8b5-kube-api-access-5vjjz\") pod \"barbican-c1b6-account-create-gls4t\" (UID: \"11bc40dc-12b2-46e3-ba04-85ff1fb6f8b5\") " pod="openstack/barbican-c1b6-account-create-gls4t" Dec 13 03:29:36 crc kubenswrapper[5070]: I1213 03:29:36.839126 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-1e18-account-create-f4zc5"] Dec 13 03:29:36 crc kubenswrapper[5070]: I1213 03:29:36.840361 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-1e18-account-create-f4zc5" Dec 13 03:29:36 crc kubenswrapper[5070]: I1213 03:29:36.842236 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret" Dec 13 03:29:36 crc kubenswrapper[5070]: I1213 03:29:36.846660 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-1e18-account-create-f4zc5"] Dec 13 03:29:36 crc kubenswrapper[5070]: I1213 03:29:36.904587 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mcfjf\" (UniqueName: \"kubernetes.io/projected/f4d4410c-3496-483f-9ce2-7e181589ada3-kube-api-access-mcfjf\") pod \"cinder-1e18-account-create-f4zc5\" (UID: \"f4d4410c-3496-483f-9ce2-7e181589ada3\") " pod="openstack/cinder-1e18-account-create-f4zc5" Dec 13 03:29:36 crc kubenswrapper[5070]: I1213 03:29:36.904742 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5vjjz\" (UniqueName: \"kubernetes.io/projected/11bc40dc-12b2-46e3-ba04-85ff1fb6f8b5-kube-api-access-5vjjz\") pod \"barbican-c1b6-account-create-gls4t\" (UID: \"11bc40dc-12b2-46e3-ba04-85ff1fb6f8b5\") " pod="openstack/barbican-c1b6-account-create-gls4t" Dec 13 03:29:36 crc kubenswrapper[5070]: I1213 03:29:36.942563 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5vjjz\" (UniqueName: \"kubernetes.io/projected/11bc40dc-12b2-46e3-ba04-85ff1fb6f8b5-kube-api-access-5vjjz\") pod \"barbican-c1b6-account-create-gls4t\" (UID: \"11bc40dc-12b2-46e3-ba04-85ff1fb6f8b5\") " pod="openstack/barbican-c1b6-account-create-gls4t" Dec 13 03:29:36 crc kubenswrapper[5070]: I1213 03:29:36.968847 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-c1b6-account-create-gls4t" Dec 13 03:29:37 crc kubenswrapper[5070]: I1213 03:29:37.006257 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mcfjf\" (UniqueName: \"kubernetes.io/projected/f4d4410c-3496-483f-9ce2-7e181589ada3-kube-api-access-mcfjf\") pod \"cinder-1e18-account-create-f4zc5\" (UID: \"f4d4410c-3496-483f-9ce2-7e181589ada3\") " pod="openstack/cinder-1e18-account-create-f4zc5" Dec 13 03:29:37 crc kubenswrapper[5070]: I1213 03:29:37.047555 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-d5b9-account-create-8mxbm"] Dec 13 03:29:37 crc kubenswrapper[5070]: I1213 03:29:37.047961 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mcfjf\" (UniqueName: \"kubernetes.io/projected/f4d4410c-3496-483f-9ce2-7e181589ada3-kube-api-access-mcfjf\") pod \"cinder-1e18-account-create-f4zc5\" (UID: \"f4d4410c-3496-483f-9ce2-7e181589ada3\") " pod="openstack/cinder-1e18-account-create-f4zc5" Dec 13 03:29:37 crc kubenswrapper[5070]: I1213 03:29:37.052315 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-d5b9-account-create-8mxbm" Dec 13 03:29:37 crc kubenswrapper[5070]: I1213 03:29:37.054978 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret" Dec 13 03:29:37 crc kubenswrapper[5070]: I1213 03:29:37.072928 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-d5b9-account-create-8mxbm"] Dec 13 03:29:37 crc kubenswrapper[5070]: I1213 03:29:37.203588 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-1e18-account-create-f4zc5" Dec 13 03:29:37 crc kubenswrapper[5070]: I1213 03:29:37.209752 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nqlzz\" (UniqueName: \"kubernetes.io/projected/7b38e233-37d8-48ae-8e8e-56e97ec0962c-kube-api-access-nqlzz\") pod \"neutron-d5b9-account-create-8mxbm\" (UID: \"7b38e233-37d8-48ae-8e8e-56e97ec0962c\") " pod="openstack/neutron-d5b9-account-create-8mxbm" Dec 13 03:29:37 crc kubenswrapper[5070]: I1213 03:29:37.311513 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nqlzz\" (UniqueName: \"kubernetes.io/projected/7b38e233-37d8-48ae-8e8e-56e97ec0962c-kube-api-access-nqlzz\") pod \"neutron-d5b9-account-create-8mxbm\" (UID: \"7b38e233-37d8-48ae-8e8e-56e97ec0962c\") " pod="openstack/neutron-d5b9-account-create-8mxbm" Dec 13 03:29:37 crc kubenswrapper[5070]: I1213 03:29:37.326992 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nqlzz\" (UniqueName: \"kubernetes.io/projected/7b38e233-37d8-48ae-8e8e-56e97ec0962c-kube-api-access-nqlzz\") pod \"neutron-d5b9-account-create-8mxbm\" (UID: \"7b38e233-37d8-48ae-8e8e-56e97ec0962c\") " pod="openstack/neutron-d5b9-account-create-8mxbm" Dec 13 03:29:37 crc kubenswrapper[5070]: I1213 03:29:37.392356 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-d5b9-account-create-8mxbm" Dec 13 03:29:37 crc kubenswrapper[5070]: I1213 03:29:37.686563 5070 generic.go:334] "Generic (PLEG): container finished" podID="0decb492-0fba-4ae9-bd55-e9d82d77550c" containerID="c97dc7d97309ad86b2bb1f11e2b677901fdcb60378be5f7fc0a07467eee18526" exitCode=0 Dec 13 03:29:37 crc kubenswrapper[5070]: I1213 03:29:37.686607 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-ds4v5" event={"ID":"0decb492-0fba-4ae9-bd55-e9d82d77550c","Type":"ContainerDied","Data":"c97dc7d97309ad86b2bb1f11e2b677901fdcb60378be5f7fc0a07467eee18526"} Dec 13 03:29:39 crc kubenswrapper[5070]: I1213 03:29:39.534846 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-ds4v5" Dec 13 03:29:39 crc kubenswrapper[5070]: I1213 03:29:39.541790 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-vv4wt" Dec 13 03:29:39 crc kubenswrapper[5070]: I1213 03:29:39.671978 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0decb492-0fba-4ae9-bd55-e9d82d77550c-config-data\") pod \"0decb492-0fba-4ae9-bd55-e9d82d77550c\" (UID: \"0decb492-0fba-4ae9-bd55-e9d82d77550c\") " Dec 13 03:29:39 crc kubenswrapper[5070]: I1213 03:29:39.672044 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/682caa0d-3a80-41aa-a899-8613f8454481-combined-ca-bundle\") pod \"682caa0d-3a80-41aa-a899-8613f8454481\" (UID: \"682caa0d-3a80-41aa-a899-8613f8454481\") " Dec 13 03:29:39 crc kubenswrapper[5070]: I1213 03:29:39.672071 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/0decb492-0fba-4ae9-bd55-e9d82d77550c-credential-keys\") pod \"0decb492-0fba-4ae9-bd55-e9d82d77550c\" (UID: \"0decb492-0fba-4ae9-bd55-e9d82d77550c\") " Dec 13 03:29:39 crc kubenswrapper[5070]: I1213 03:29:39.672117 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-djrdj\" (UniqueName: \"kubernetes.io/projected/682caa0d-3a80-41aa-a899-8613f8454481-kube-api-access-djrdj\") pod \"682caa0d-3a80-41aa-a899-8613f8454481\" (UID: \"682caa0d-3a80-41aa-a899-8613f8454481\") " Dec 13 03:29:39 crc kubenswrapper[5070]: I1213 03:29:39.672140 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0decb492-0fba-4ae9-bd55-e9d82d77550c-scripts\") pod \"0decb492-0fba-4ae9-bd55-e9d82d77550c\" (UID: \"0decb492-0fba-4ae9-bd55-e9d82d77550c\") " Dec 13 03:29:39 crc kubenswrapper[5070]: I1213 03:29:39.672174 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/682caa0d-3a80-41aa-a899-8613f8454481-db-sync-config-data\") pod \"682caa0d-3a80-41aa-a899-8613f8454481\" (UID: \"682caa0d-3a80-41aa-a899-8613f8454481\") " Dec 13 03:29:39 crc kubenswrapper[5070]: I1213 03:29:39.672852 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/0decb492-0fba-4ae9-bd55-e9d82d77550c-fernet-keys\") pod \"0decb492-0fba-4ae9-bd55-e9d82d77550c\" (UID: \"0decb492-0fba-4ae9-bd55-e9d82d77550c\") " Dec 13 03:29:39 crc kubenswrapper[5070]: I1213 03:29:39.672908 5070 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0decb492-0fba-4ae9-bd55-e9d82d77550c-combined-ca-bundle\") pod \"0decb492-0fba-4ae9-bd55-e9d82d77550c\" (UID: \"0decb492-0fba-4ae9-bd55-e9d82d77550c\") " Dec 13 03:29:39 crc kubenswrapper[5070]: I1213 03:29:39.672972 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/682caa0d-3a80-41aa-a899-8613f8454481-config-data\") pod \"682caa0d-3a80-41aa-a899-8613f8454481\" (UID: \"682caa0d-3a80-41aa-a899-8613f8454481\") " Dec 13 03:29:39 crc kubenswrapper[5070]: I1213 03:29:39.673056 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gxm7n\" (UniqueName: \"kubernetes.io/projected/0decb492-0fba-4ae9-bd55-e9d82d77550c-kube-api-access-gxm7n\") pod \"0decb492-0fba-4ae9-bd55-e9d82d77550c\" (UID: \"0decb492-0fba-4ae9-bd55-e9d82d77550c\") " Dec 13 03:29:39 crc kubenswrapper[5070]: I1213 03:29:39.678682 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/682caa0d-3a80-41aa-a899-8613f8454481-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "682caa0d-3a80-41aa-a899-8613f8454481" (UID: "682caa0d-3a80-41aa-a899-8613f8454481"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:29:39 crc kubenswrapper[5070]: I1213 03:29:39.679039 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0decb492-0fba-4ae9-bd55-e9d82d77550c-kube-api-access-gxm7n" (OuterVolumeSpecName: "kube-api-access-gxm7n") pod "0decb492-0fba-4ae9-bd55-e9d82d77550c" (UID: "0decb492-0fba-4ae9-bd55-e9d82d77550c"). InnerVolumeSpecName "kube-api-access-gxm7n". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:29:39 crc kubenswrapper[5070]: I1213 03:29:39.679342 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0decb492-0fba-4ae9-bd55-e9d82d77550c-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "0decb492-0fba-4ae9-bd55-e9d82d77550c" (UID: "0decb492-0fba-4ae9-bd55-e9d82d77550c"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:29:39 crc kubenswrapper[5070]: I1213 03:29:39.679890 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/682caa0d-3a80-41aa-a899-8613f8454481-kube-api-access-djrdj" (OuterVolumeSpecName: "kube-api-access-djrdj") pod "682caa0d-3a80-41aa-a899-8613f8454481" (UID: "682caa0d-3a80-41aa-a899-8613f8454481"). InnerVolumeSpecName "kube-api-access-djrdj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:29:39 crc kubenswrapper[5070]: I1213 03:29:39.681496 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0decb492-0fba-4ae9-bd55-e9d82d77550c-scripts" (OuterVolumeSpecName: "scripts") pod "0decb492-0fba-4ae9-bd55-e9d82d77550c" (UID: "0decb492-0fba-4ae9-bd55-e9d82d77550c"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:29:39 crc kubenswrapper[5070]: I1213 03:29:39.683885 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0decb492-0fba-4ae9-bd55-e9d82d77550c-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "0decb492-0fba-4ae9-bd55-e9d82d77550c" (UID: "0decb492-0fba-4ae9-bd55-e9d82d77550c"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:29:39 crc kubenswrapper[5070]: I1213 03:29:39.700924 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0decb492-0fba-4ae9-bd55-e9d82d77550c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0decb492-0fba-4ae9-bd55-e9d82d77550c" (UID: "0decb492-0fba-4ae9-bd55-e9d82d77550c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:29:39 crc kubenswrapper[5070]: I1213 03:29:39.704042 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0decb492-0fba-4ae9-bd55-e9d82d77550c-config-data" (OuterVolumeSpecName: "config-data") pod "0decb492-0fba-4ae9-bd55-e9d82d77550c" (UID: "0decb492-0fba-4ae9-bd55-e9d82d77550c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:29:39 crc kubenswrapper[5070]: I1213 03:29:39.705119 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-vv4wt" event={"ID":"682caa0d-3a80-41aa-a899-8613f8454481","Type":"ContainerDied","Data":"035a85ca2cb3e0a4c7a046370d04a08a93832d84d421f66cb3a5c8ae8ee1ef4e"} Dec 13 03:29:39 crc kubenswrapper[5070]: I1213 03:29:39.705370 5070 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="035a85ca2cb3e0a4c7a046370d04a08a93832d84d421f66cb3a5c8ae8ee1ef4e" Dec 13 03:29:39 crc kubenswrapper[5070]: I1213 03:29:39.705472 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/682caa0d-3a80-41aa-a899-8613f8454481-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "682caa0d-3a80-41aa-a899-8613f8454481" (UID: "682caa0d-3a80-41aa-a899-8613f8454481"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:29:39 crc kubenswrapper[5070]: I1213 03:29:39.705514 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-vv4wt" Dec 13 03:29:39 crc kubenswrapper[5070]: I1213 03:29:39.707147 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-ds4v5" event={"ID":"0decb492-0fba-4ae9-bd55-e9d82d77550c","Type":"ContainerDied","Data":"9bfb97dc6dccee08c7cb8d490d5aae2d92df83877d27a778aacb040c16ebefdb"} Dec 13 03:29:39 crc kubenswrapper[5070]: I1213 03:29:39.707486 5070 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9bfb97dc6dccee08c7cb8d490d5aae2d92df83877d27a778aacb040c16ebefdb" Dec 13 03:29:39 crc kubenswrapper[5070]: I1213 03:29:39.707212 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-ds4v5" Dec 13 03:29:39 crc kubenswrapper[5070]: I1213 03:29:39.725279 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/682caa0d-3a80-41aa-a899-8613f8454481-config-data" (OuterVolumeSpecName: "config-data") pod "682caa0d-3a80-41aa-a899-8613f8454481" (UID: "682caa0d-3a80-41aa-a899-8613f8454481"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:29:39 crc kubenswrapper[5070]: I1213 03:29:39.768464 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-ds4v5"] Dec 13 03:29:39 crc kubenswrapper[5070]: I1213 03:29:39.775019 5070 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0decb492-0fba-4ae9-bd55-e9d82d77550c-config-data\") on node \"crc\" DevicePath \"\"" Dec 13 03:29:39 crc kubenswrapper[5070]: I1213 03:29:39.775053 5070 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/682caa0d-3a80-41aa-a899-8613f8454481-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 03:29:39 crc kubenswrapper[5070]: I1213 03:29:39.775063 5070 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/0decb492-0fba-4ae9-bd55-e9d82d77550c-credential-keys\") on node \"crc\" DevicePath \"\"" Dec 13 03:29:39 crc kubenswrapper[5070]: I1213 03:29:39.775073 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-djrdj\" (UniqueName: \"kubernetes.io/projected/682caa0d-3a80-41aa-a899-8613f8454481-kube-api-access-djrdj\") on node \"crc\" DevicePath \"\"" Dec 13 03:29:39 crc kubenswrapper[5070]: I1213 03:29:39.775083 5070 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0decb492-0fba-4ae9-bd55-e9d82d77550c-scripts\") on node \"crc\" DevicePath \"\"" Dec 13 03:29:39 crc kubenswrapper[5070]: I1213 03:29:39.775092 5070 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/682caa0d-3a80-41aa-a899-8613f8454481-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Dec 13 03:29:39 crc kubenswrapper[5070]: I1213 03:29:39.775100 5070 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/0decb492-0fba-4ae9-bd55-e9d82d77550c-fernet-keys\") on node \"crc\" DevicePath \"\"" Dec 13 03:29:39 crc kubenswrapper[5070]: I1213 03:29:39.775108 5070 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0decb492-0fba-4ae9-bd55-e9d82d77550c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 03:29:39 crc kubenswrapper[5070]: I1213 03:29:39.775116 5070 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/682caa0d-3a80-41aa-a899-8613f8454481-config-data\") on node \"crc\" DevicePath \"\"" Dec 13 03:29:39 crc kubenswrapper[5070]: I1213 03:29:39.775124 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gxm7n\" (UniqueName: \"kubernetes.io/projected/0decb492-0fba-4ae9-bd55-e9d82d77550c-kube-api-access-gxm7n\") on node \"crc\" DevicePath \"\"" Dec 13 03:29:39 crc kubenswrapper[5070]: I1213 03:29:39.776775 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-ds4v5"] Dec 13 03:29:39 crc kubenswrapper[5070]: I1213 
03:29:39.784694 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6bf59f66bf-pq5nh" Dec 13 03:29:39 crc kubenswrapper[5070]: I1213 03:29:39.840328 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8554648995-dtgg8"] Dec 13 03:29:39 crc kubenswrapper[5070]: I1213 03:29:39.840588 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-8554648995-dtgg8" podUID="9368a062-8e24-48ec-9a22-c29f1c906361" containerName="dnsmasq-dns" containerID="cri-o://7fe6a58a855d6aaf3a03c7fa8a216360fc41ad225d4f91a8de791013424fccca" gracePeriod=10 Dec 13 03:29:39 crc kubenswrapper[5070]: I1213 03:29:39.890626 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-4v7hg"] Dec 13 03:29:39 crc kubenswrapper[5070]: E1213 03:29:39.891264 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0decb492-0fba-4ae9-bd55-e9d82d77550c" containerName="keystone-bootstrap" Dec 13 03:29:39 crc kubenswrapper[5070]: I1213 03:29:39.891282 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="0decb492-0fba-4ae9-bd55-e9d82d77550c" containerName="keystone-bootstrap" Dec 13 03:29:39 crc kubenswrapper[5070]: E1213 03:29:39.891298 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="682caa0d-3a80-41aa-a899-8613f8454481" containerName="glance-db-sync" Dec 13 03:29:39 crc kubenswrapper[5070]: I1213 03:29:39.891305 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="682caa0d-3a80-41aa-a899-8613f8454481" containerName="glance-db-sync" Dec 13 03:29:39 crc kubenswrapper[5070]: I1213 03:29:39.891490 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="682caa0d-3a80-41aa-a899-8613f8454481" containerName="glance-db-sync" Dec 13 03:29:39 crc kubenswrapper[5070]: I1213 03:29:39.891516 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="0decb492-0fba-4ae9-bd55-e9d82d77550c" containerName="keystone-bootstrap" Dec 13 03:29:39 crc kubenswrapper[5070]: I1213 03:29:39.892133 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-4v7hg" Dec 13 03:29:39 crc kubenswrapper[5070]: I1213 03:29:39.896683 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 13 03:29:39 crc kubenswrapper[5070]: I1213 03:29:39.896698 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 13 03:29:39 crc kubenswrapper[5070]: I1213 03:29:39.896751 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-g4vc4" Dec 13 03:29:39 crc kubenswrapper[5070]: I1213 03:29:39.896855 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 13 03:29:39 crc kubenswrapper[5070]: I1213 03:29:39.900037 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-4v7hg"] Dec 13 03:29:40 crc kubenswrapper[5070]: I1213 03:29:40.081597 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36830906-e994-4294-8cef-75a40f39ede4-combined-ca-bundle\") pod \"keystone-bootstrap-4v7hg\" (UID: \"36830906-e994-4294-8cef-75a40f39ede4\") " pod="openstack/keystone-bootstrap-4v7hg" Dec 13 03:29:40 crc kubenswrapper[5070]: I1213 03:29:40.081657 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/36830906-e994-4294-8cef-75a40f39ede4-fernet-keys\") pod \"keystone-bootstrap-4v7hg\" (UID: \"36830906-e994-4294-8cef-75a40f39ede4\") " pod="openstack/keystone-bootstrap-4v7hg" Dec 13 03:29:40 crc kubenswrapper[5070]: I1213 03:29:40.081710 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/36830906-e994-4294-8cef-75a40f39ede4-config-data\") pod \"keystone-bootstrap-4v7hg\" (UID: \"36830906-e994-4294-8cef-75a40f39ede4\") " pod="openstack/keystone-bootstrap-4v7hg" Dec 13 03:29:40 crc kubenswrapper[5070]: I1213 03:29:40.081729 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/36830906-e994-4294-8cef-75a40f39ede4-credential-keys\") pod \"keystone-bootstrap-4v7hg\" (UID: \"36830906-e994-4294-8cef-75a40f39ede4\") " pod="openstack/keystone-bootstrap-4v7hg" Dec 13 03:29:40 crc kubenswrapper[5070]: I1213 03:29:40.081752 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ffsvj\" (UniqueName: \"kubernetes.io/projected/36830906-e994-4294-8cef-75a40f39ede4-kube-api-access-ffsvj\") pod \"keystone-bootstrap-4v7hg\" (UID: \"36830906-e994-4294-8cef-75a40f39ede4\") " pod="openstack/keystone-bootstrap-4v7hg" Dec 13 03:29:40 crc kubenswrapper[5070]: I1213 03:29:40.081783 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/36830906-e994-4294-8cef-75a40f39ede4-scripts\") pod \"keystone-bootstrap-4v7hg\" (UID: \"36830906-e994-4294-8cef-75a40f39ede4\") " pod="openstack/keystone-bootstrap-4v7hg" Dec 13 03:29:40 crc kubenswrapper[5070]: I1213 03:29:40.182859 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/36830906-e994-4294-8cef-75a40f39ede4-fernet-keys\") pod \"keystone-bootstrap-4v7hg\" (UID: 
\"36830906-e994-4294-8cef-75a40f39ede4\") " pod="openstack/keystone-bootstrap-4v7hg" Dec 13 03:29:40 crc kubenswrapper[5070]: I1213 03:29:40.182959 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/36830906-e994-4294-8cef-75a40f39ede4-config-data\") pod \"keystone-bootstrap-4v7hg\" (UID: \"36830906-e994-4294-8cef-75a40f39ede4\") " pod="openstack/keystone-bootstrap-4v7hg" Dec 13 03:29:40 crc kubenswrapper[5070]: I1213 03:29:40.182986 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/36830906-e994-4294-8cef-75a40f39ede4-credential-keys\") pod \"keystone-bootstrap-4v7hg\" (UID: \"36830906-e994-4294-8cef-75a40f39ede4\") " pod="openstack/keystone-bootstrap-4v7hg" Dec 13 03:29:40 crc kubenswrapper[5070]: I1213 03:29:40.183017 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ffsvj\" (UniqueName: \"kubernetes.io/projected/36830906-e994-4294-8cef-75a40f39ede4-kube-api-access-ffsvj\") pod \"keystone-bootstrap-4v7hg\" (UID: \"36830906-e994-4294-8cef-75a40f39ede4\") " pod="openstack/keystone-bootstrap-4v7hg" Dec 13 03:29:40 crc kubenswrapper[5070]: I1213 03:29:40.183054 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/36830906-e994-4294-8cef-75a40f39ede4-scripts\") pod \"keystone-bootstrap-4v7hg\" (UID: \"36830906-e994-4294-8cef-75a40f39ede4\") " pod="openstack/keystone-bootstrap-4v7hg" Dec 13 03:29:40 crc kubenswrapper[5070]: I1213 03:29:40.183141 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36830906-e994-4294-8cef-75a40f39ede4-combined-ca-bundle\") pod \"keystone-bootstrap-4v7hg\" (UID: \"36830906-e994-4294-8cef-75a40f39ede4\") " pod="openstack/keystone-bootstrap-4v7hg" Dec 13 03:29:40 crc kubenswrapper[5070]: I1213 03:29:40.183541 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0decb492-0fba-4ae9-bd55-e9d82d77550c" path="/var/lib/kubelet/pods/0decb492-0fba-4ae9-bd55-e9d82d77550c/volumes" Dec 13 03:29:40 crc kubenswrapper[5070]: I1213 03:29:40.204696 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ffsvj\" (UniqueName: \"kubernetes.io/projected/36830906-e994-4294-8cef-75a40f39ede4-kube-api-access-ffsvj\") pod \"keystone-bootstrap-4v7hg\" (UID: \"36830906-e994-4294-8cef-75a40f39ede4\") " pod="openstack/keystone-bootstrap-4v7hg" Dec 13 03:29:40 crc kubenswrapper[5070]: I1213 03:29:40.209778 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/36830906-e994-4294-8cef-75a40f39ede4-credential-keys\") pod \"keystone-bootstrap-4v7hg\" (UID: \"36830906-e994-4294-8cef-75a40f39ede4\") " pod="openstack/keystone-bootstrap-4v7hg" Dec 13 03:29:40 crc kubenswrapper[5070]: I1213 03:29:40.209593 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/36830906-e994-4294-8cef-75a40f39ede4-fernet-keys\") pod \"keystone-bootstrap-4v7hg\" (UID: \"36830906-e994-4294-8cef-75a40f39ede4\") " pod="openstack/keystone-bootstrap-4v7hg" Dec 13 03:29:40 crc kubenswrapper[5070]: I1213 03:29:40.213798 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/36830906-e994-4294-8cef-75a40f39ede4-scripts\") pod \"keystone-bootstrap-4v7hg\" (UID: \"36830906-e994-4294-8cef-75a40f39ede4\") " pod="openstack/keystone-bootstrap-4v7hg" Dec 13 03:29:40 crc kubenswrapper[5070]: I1213 03:29:40.214686 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/36830906-e994-4294-8cef-75a40f39ede4-config-data\") pod \"keystone-bootstrap-4v7hg\" (UID: \"36830906-e994-4294-8cef-75a40f39ede4\") " pod="openstack/keystone-bootstrap-4v7hg" Dec 13 03:29:40 crc kubenswrapper[5070]: I1213 03:29:40.224217 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36830906-e994-4294-8cef-75a40f39ede4-combined-ca-bundle\") pod \"keystone-bootstrap-4v7hg\" (UID: \"36830906-e994-4294-8cef-75a40f39ede4\") " pod="openstack/keystone-bootstrap-4v7hg" Dec 13 03:29:40 crc kubenswrapper[5070]: I1213 03:29:40.282079 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-4v7hg" Dec 13 03:29:40 crc kubenswrapper[5070]: I1213 03:29:40.330458 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8554648995-dtgg8" Dec 13 03:29:40 crc kubenswrapper[5070]: I1213 03:29:40.487500 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9368a062-8e24-48ec-9a22-c29f1c906361-config\") pod \"9368a062-8e24-48ec-9a22-c29f1c906361\" (UID: \"9368a062-8e24-48ec-9a22-c29f1c906361\") " Dec 13 03:29:40 crc kubenswrapper[5070]: I1213 03:29:40.488434 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9368a062-8e24-48ec-9a22-c29f1c906361-ovsdbserver-nb\") pod \"9368a062-8e24-48ec-9a22-c29f1c906361\" (UID: \"9368a062-8e24-48ec-9a22-c29f1c906361\") " Dec 13 03:29:40 crc kubenswrapper[5070]: I1213 03:29:40.488604 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-456jz\" (UniqueName: \"kubernetes.io/projected/9368a062-8e24-48ec-9a22-c29f1c906361-kube-api-access-456jz\") pod \"9368a062-8e24-48ec-9a22-c29f1c906361\" (UID: \"9368a062-8e24-48ec-9a22-c29f1c906361\") " Dec 13 03:29:40 crc kubenswrapper[5070]: I1213 03:29:40.488627 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9368a062-8e24-48ec-9a22-c29f1c906361-dns-svc\") pod \"9368a062-8e24-48ec-9a22-c29f1c906361\" (UID: \"9368a062-8e24-48ec-9a22-c29f1c906361\") " Dec 13 03:29:40 crc kubenswrapper[5070]: I1213 03:29:40.488651 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9368a062-8e24-48ec-9a22-c29f1c906361-ovsdbserver-sb\") pod \"9368a062-8e24-48ec-9a22-c29f1c906361\" (UID: \"9368a062-8e24-48ec-9a22-c29f1c906361\") " Dec 13 03:29:40 crc kubenswrapper[5070]: I1213 03:29:40.492770 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9368a062-8e24-48ec-9a22-c29f1c906361-kube-api-access-456jz" (OuterVolumeSpecName: "kube-api-access-456jz") pod "9368a062-8e24-48ec-9a22-c29f1c906361" (UID: "9368a062-8e24-48ec-9a22-c29f1c906361"). InnerVolumeSpecName "kube-api-access-456jz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:29:40 crc kubenswrapper[5070]: I1213 03:29:40.551718 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9368a062-8e24-48ec-9a22-c29f1c906361-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "9368a062-8e24-48ec-9a22-c29f1c906361" (UID: "9368a062-8e24-48ec-9a22-c29f1c906361"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:29:40 crc kubenswrapper[5070]: I1213 03:29:40.561855 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9368a062-8e24-48ec-9a22-c29f1c906361-config" (OuterVolumeSpecName: "config") pod "9368a062-8e24-48ec-9a22-c29f1c906361" (UID: "9368a062-8e24-48ec-9a22-c29f1c906361"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:29:40 crc kubenswrapper[5070]: I1213 03:29:40.563297 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9368a062-8e24-48ec-9a22-c29f1c906361-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "9368a062-8e24-48ec-9a22-c29f1c906361" (UID: "9368a062-8e24-48ec-9a22-c29f1c906361"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:29:40 crc kubenswrapper[5070]: I1213 03:29:40.564212 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9368a062-8e24-48ec-9a22-c29f1c906361-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "9368a062-8e24-48ec-9a22-c29f1c906361" (UID: "9368a062-8e24-48ec-9a22-c29f1c906361"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:29:40 crc kubenswrapper[5070]: I1213 03:29:40.593038 5070 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9368a062-8e24-48ec-9a22-c29f1c906361-config\") on node \"crc\" DevicePath \"\"" Dec 13 03:29:40 crc kubenswrapper[5070]: I1213 03:29:40.593083 5070 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9368a062-8e24-48ec-9a22-c29f1c906361-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 13 03:29:40 crc kubenswrapper[5070]: I1213 03:29:40.593097 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-456jz\" (UniqueName: \"kubernetes.io/projected/9368a062-8e24-48ec-9a22-c29f1c906361-kube-api-access-456jz\") on node \"crc\" DevicePath \"\"" Dec 13 03:29:40 crc kubenswrapper[5070]: I1213 03:29:40.593110 5070 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9368a062-8e24-48ec-9a22-c29f1c906361-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 13 03:29:40 crc kubenswrapper[5070]: I1213 03:29:40.593120 5070 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9368a062-8e24-48ec-9a22-c29f1c906361-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 13 03:29:40 crc kubenswrapper[5070]: I1213 03:29:40.656047 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-d5b9-account-create-8mxbm"] Dec 13 03:29:40 crc kubenswrapper[5070]: I1213 03:29:40.712676 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-1e18-account-create-f4zc5"] Dec 13 03:29:40 crc kubenswrapper[5070]: I1213 03:29:40.737919 5070 kubelet.go:2453] "SyncLoop (PLEG): event for 
pod" pod="openstack/neutron-d5b9-account-create-8mxbm" event={"ID":"7b38e233-37d8-48ae-8e8e-56e97ec0962c","Type":"ContainerStarted","Data":"93cdddb019de1bcd7127d3ee50e433599cd4c8c59ef18f14862440a38405f8cd"} Dec 13 03:29:40 crc kubenswrapper[5070]: I1213 03:29:40.745062 5070 generic.go:334] "Generic (PLEG): container finished" podID="9368a062-8e24-48ec-9a22-c29f1c906361" containerID="7fe6a58a855d6aaf3a03c7fa8a216360fc41ad225d4f91a8de791013424fccca" exitCode=0 Dec 13 03:29:40 crc kubenswrapper[5070]: I1213 03:29:40.745143 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554648995-dtgg8" event={"ID":"9368a062-8e24-48ec-9a22-c29f1c906361","Type":"ContainerDied","Data":"7fe6a58a855d6aaf3a03c7fa8a216360fc41ad225d4f91a8de791013424fccca"} Dec 13 03:29:40 crc kubenswrapper[5070]: I1213 03:29:40.745175 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554648995-dtgg8" event={"ID":"9368a062-8e24-48ec-9a22-c29f1c906361","Type":"ContainerDied","Data":"15f99c70ef592730938994014ec363d8845c01410abd4764b5d37e3de2c35578"} Dec 13 03:29:40 crc kubenswrapper[5070]: I1213 03:29:40.745196 5070 scope.go:117] "RemoveContainer" containerID="7fe6a58a855d6aaf3a03c7fa8a216360fc41ad225d4f91a8de791013424fccca" Dec 13 03:29:40 crc kubenswrapper[5070]: I1213 03:29:40.745349 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8554648995-dtgg8" Dec 13 03:29:40 crc kubenswrapper[5070]: I1213 03:29:40.749086 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"62471d23-7b26-439a-9a26-e65abe4be2c1","Type":"ContainerStarted","Data":"fa180f2a9f89d575b0dbc31f469ceef043ded814f2ca2631bdd8c215ba12b2bb"} Dec 13 03:29:40 crc kubenswrapper[5070]: I1213 03:29:40.751665 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-z9bn5" event={"ID":"9abbaf32-db3e-4169-8159-6d707cdf3e2a","Type":"ContainerStarted","Data":"1368fd0d5b1bb1fd7edd55f9cc013293d39694fefbc7d2f11c297555ad6dee55"} Dec 13 03:29:40 crc kubenswrapper[5070]: I1213 03:29:40.752580 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-c1b6-account-create-gls4t"] Dec 13 03:29:40 crc kubenswrapper[5070]: I1213 03:29:40.785231 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-sync-z9bn5" podStartSLOduration=2.061822831 podStartE2EDuration="11.785206613s" podCreationTimestamp="2025-12-13 03:29:29 +0000 UTC" firstStartedPulling="2025-12-13 03:29:30.368684464 +0000 UTC m=+1062.604528000" lastFinishedPulling="2025-12-13 03:29:40.092068236 +0000 UTC m=+1072.327911782" observedRunningTime="2025-12-13 03:29:40.772096276 +0000 UTC m=+1073.007939822" watchObservedRunningTime="2025-12-13 03:29:40.785206613 +0000 UTC m=+1073.021050159" Dec 13 03:29:40 crc kubenswrapper[5070]: I1213 03:29:40.798673 5070 scope.go:117] "RemoveContainer" containerID="2d08fe5232ac49969b29e9d870449b5c7698c02f1b3e06fdf45faf30448ecef2" Dec 13 03:29:40 crc kubenswrapper[5070]: I1213 03:29:40.813511 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8554648995-dtgg8"] Dec 13 03:29:40 crc kubenswrapper[5070]: I1213 03:29:40.832161 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-8554648995-dtgg8"] Dec 13 03:29:40 crc kubenswrapper[5070]: I1213 03:29:40.855006 5070 scope.go:117] "RemoveContainer" containerID="7fe6a58a855d6aaf3a03c7fa8a216360fc41ad225d4f91a8de791013424fccca" Dec 13 
03:29:40 crc kubenswrapper[5070]: E1213 03:29:40.859202 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7fe6a58a855d6aaf3a03c7fa8a216360fc41ad225d4f91a8de791013424fccca\": container with ID starting with 7fe6a58a855d6aaf3a03c7fa8a216360fc41ad225d4f91a8de791013424fccca not found: ID does not exist" containerID="7fe6a58a855d6aaf3a03c7fa8a216360fc41ad225d4f91a8de791013424fccca" Dec 13 03:29:40 crc kubenswrapper[5070]: I1213 03:29:40.859260 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7fe6a58a855d6aaf3a03c7fa8a216360fc41ad225d4f91a8de791013424fccca"} err="failed to get container status \"7fe6a58a855d6aaf3a03c7fa8a216360fc41ad225d4f91a8de791013424fccca\": rpc error: code = NotFound desc = could not find container \"7fe6a58a855d6aaf3a03c7fa8a216360fc41ad225d4f91a8de791013424fccca\": container with ID starting with 7fe6a58a855d6aaf3a03c7fa8a216360fc41ad225d4f91a8de791013424fccca not found: ID does not exist" Dec 13 03:29:40 crc kubenswrapper[5070]: I1213 03:29:40.859289 5070 scope.go:117] "RemoveContainer" containerID="2d08fe5232ac49969b29e9d870449b5c7698c02f1b3e06fdf45faf30448ecef2" Dec 13 03:29:40 crc kubenswrapper[5070]: E1213 03:29:40.859675 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2d08fe5232ac49969b29e9d870449b5c7698c02f1b3e06fdf45faf30448ecef2\": container with ID starting with 2d08fe5232ac49969b29e9d870449b5c7698c02f1b3e06fdf45faf30448ecef2 not found: ID does not exist" containerID="2d08fe5232ac49969b29e9d870449b5c7698c02f1b3e06fdf45faf30448ecef2" Dec 13 03:29:40 crc kubenswrapper[5070]: I1213 03:29:40.859697 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2d08fe5232ac49969b29e9d870449b5c7698c02f1b3e06fdf45faf30448ecef2"} err="failed to get container status \"2d08fe5232ac49969b29e9d870449b5c7698c02f1b3e06fdf45faf30448ecef2\": rpc error: code = NotFound desc = could not find container \"2d08fe5232ac49969b29e9d870449b5c7698c02f1b3e06fdf45faf30448ecef2\": container with ID starting with 2d08fe5232ac49969b29e9d870449b5c7698c02f1b3e06fdf45faf30448ecef2 not found: ID does not exist" Dec 13 03:29:40 crc kubenswrapper[5070]: I1213 03:29:40.903377 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-4v7hg"] Dec 13 03:29:40 crc kubenswrapper[5070]: W1213 03:29:40.910015 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod36830906_e994_4294_8cef_75a40f39ede4.slice/crio-23fcc08eef08a1ab3d5e09ce048b9a12c9eeef9c5c241311f16ef59826e38e9f WatchSource:0}: Error finding container 23fcc08eef08a1ab3d5e09ce048b9a12c9eeef9c5c241311f16ef59826e38e9f: Status 404 returned error can't find the container with id 23fcc08eef08a1ab3d5e09ce048b9a12c9eeef9c5c241311f16ef59826e38e9f Dec 13 03:29:40 crc kubenswrapper[5070]: I1213 03:29:40.985526 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5b6dbdb6f5-6h6vr"] Dec 13 03:29:40 crc kubenswrapper[5070]: E1213 03:29:40.986263 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9368a062-8e24-48ec-9a22-c29f1c906361" containerName="init" Dec 13 03:29:40 crc kubenswrapper[5070]: I1213 03:29:40.986274 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="9368a062-8e24-48ec-9a22-c29f1c906361" containerName="init" Dec 13 03:29:40 crc 
kubenswrapper[5070]: E1213 03:29:40.986300 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9368a062-8e24-48ec-9a22-c29f1c906361" containerName="dnsmasq-dns" Dec 13 03:29:40 crc kubenswrapper[5070]: I1213 03:29:40.986306 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="9368a062-8e24-48ec-9a22-c29f1c906361" containerName="dnsmasq-dns" Dec 13 03:29:40 crc kubenswrapper[5070]: I1213 03:29:40.986630 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="9368a062-8e24-48ec-9a22-c29f1c906361" containerName="dnsmasq-dns" Dec 13 03:29:40 crc kubenswrapper[5070]: I1213 03:29:40.987864 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5b6dbdb6f5-6h6vr" Dec 13 03:29:40 crc kubenswrapper[5070]: I1213 03:29:40.996143 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5b6dbdb6f5-6h6vr"] Dec 13 03:29:41 crc kubenswrapper[5070]: I1213 03:29:41.133331 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7qx45\" (UniqueName: \"kubernetes.io/projected/8b97be4b-4027-4f04-b290-eca32cde927b-kube-api-access-7qx45\") pod \"dnsmasq-dns-5b6dbdb6f5-6h6vr\" (UID: \"8b97be4b-4027-4f04-b290-eca32cde927b\") " pod="openstack/dnsmasq-dns-5b6dbdb6f5-6h6vr" Dec 13 03:29:41 crc kubenswrapper[5070]: I1213 03:29:41.133668 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8b97be4b-4027-4f04-b290-eca32cde927b-ovsdbserver-nb\") pod \"dnsmasq-dns-5b6dbdb6f5-6h6vr\" (UID: \"8b97be4b-4027-4f04-b290-eca32cde927b\") " pod="openstack/dnsmasq-dns-5b6dbdb6f5-6h6vr" Dec 13 03:29:41 crc kubenswrapper[5070]: I1213 03:29:41.133690 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8b97be4b-4027-4f04-b290-eca32cde927b-dns-svc\") pod \"dnsmasq-dns-5b6dbdb6f5-6h6vr\" (UID: \"8b97be4b-4027-4f04-b290-eca32cde927b\") " pod="openstack/dnsmasq-dns-5b6dbdb6f5-6h6vr" Dec 13 03:29:41 crc kubenswrapper[5070]: I1213 03:29:41.133708 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8b97be4b-4027-4f04-b290-eca32cde927b-config\") pod \"dnsmasq-dns-5b6dbdb6f5-6h6vr\" (UID: \"8b97be4b-4027-4f04-b290-eca32cde927b\") " pod="openstack/dnsmasq-dns-5b6dbdb6f5-6h6vr" Dec 13 03:29:41 crc kubenswrapper[5070]: I1213 03:29:41.133731 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8b97be4b-4027-4f04-b290-eca32cde927b-ovsdbserver-sb\") pod \"dnsmasq-dns-5b6dbdb6f5-6h6vr\" (UID: \"8b97be4b-4027-4f04-b290-eca32cde927b\") " pod="openstack/dnsmasq-dns-5b6dbdb6f5-6h6vr" Dec 13 03:29:41 crc kubenswrapper[5070]: I1213 03:29:41.237472 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7qx45\" (UniqueName: \"kubernetes.io/projected/8b97be4b-4027-4f04-b290-eca32cde927b-kube-api-access-7qx45\") pod \"dnsmasq-dns-5b6dbdb6f5-6h6vr\" (UID: \"8b97be4b-4027-4f04-b290-eca32cde927b\") " pod="openstack/dnsmasq-dns-5b6dbdb6f5-6h6vr" Dec 13 03:29:41 crc kubenswrapper[5070]: I1213 03:29:41.237577 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: 
\"kubernetes.io/configmap/8b97be4b-4027-4f04-b290-eca32cde927b-ovsdbserver-nb\") pod \"dnsmasq-dns-5b6dbdb6f5-6h6vr\" (UID: \"8b97be4b-4027-4f04-b290-eca32cde927b\") " pod="openstack/dnsmasq-dns-5b6dbdb6f5-6h6vr" Dec 13 03:29:41 crc kubenswrapper[5070]: I1213 03:29:41.237612 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8b97be4b-4027-4f04-b290-eca32cde927b-dns-svc\") pod \"dnsmasq-dns-5b6dbdb6f5-6h6vr\" (UID: \"8b97be4b-4027-4f04-b290-eca32cde927b\") " pod="openstack/dnsmasq-dns-5b6dbdb6f5-6h6vr" Dec 13 03:29:41 crc kubenswrapper[5070]: I1213 03:29:41.237636 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8b97be4b-4027-4f04-b290-eca32cde927b-config\") pod \"dnsmasq-dns-5b6dbdb6f5-6h6vr\" (UID: \"8b97be4b-4027-4f04-b290-eca32cde927b\") " pod="openstack/dnsmasq-dns-5b6dbdb6f5-6h6vr" Dec 13 03:29:41 crc kubenswrapper[5070]: I1213 03:29:41.237663 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8b97be4b-4027-4f04-b290-eca32cde927b-ovsdbserver-sb\") pod \"dnsmasq-dns-5b6dbdb6f5-6h6vr\" (UID: \"8b97be4b-4027-4f04-b290-eca32cde927b\") " pod="openstack/dnsmasq-dns-5b6dbdb6f5-6h6vr" Dec 13 03:29:41 crc kubenswrapper[5070]: I1213 03:29:41.239225 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8b97be4b-4027-4f04-b290-eca32cde927b-ovsdbserver-nb\") pod \"dnsmasq-dns-5b6dbdb6f5-6h6vr\" (UID: \"8b97be4b-4027-4f04-b290-eca32cde927b\") " pod="openstack/dnsmasq-dns-5b6dbdb6f5-6h6vr" Dec 13 03:29:41 crc kubenswrapper[5070]: I1213 03:29:41.239913 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8b97be4b-4027-4f04-b290-eca32cde927b-config\") pod \"dnsmasq-dns-5b6dbdb6f5-6h6vr\" (UID: \"8b97be4b-4027-4f04-b290-eca32cde927b\") " pod="openstack/dnsmasq-dns-5b6dbdb6f5-6h6vr" Dec 13 03:29:41 crc kubenswrapper[5070]: I1213 03:29:41.240682 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8b97be4b-4027-4f04-b290-eca32cde927b-dns-svc\") pod \"dnsmasq-dns-5b6dbdb6f5-6h6vr\" (UID: \"8b97be4b-4027-4f04-b290-eca32cde927b\") " pod="openstack/dnsmasq-dns-5b6dbdb6f5-6h6vr" Dec 13 03:29:41 crc kubenswrapper[5070]: I1213 03:29:41.240808 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8b97be4b-4027-4f04-b290-eca32cde927b-ovsdbserver-sb\") pod \"dnsmasq-dns-5b6dbdb6f5-6h6vr\" (UID: \"8b97be4b-4027-4f04-b290-eca32cde927b\") " pod="openstack/dnsmasq-dns-5b6dbdb6f5-6h6vr" Dec 13 03:29:41 crc kubenswrapper[5070]: I1213 03:29:41.275404 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7qx45\" (UniqueName: \"kubernetes.io/projected/8b97be4b-4027-4f04-b290-eca32cde927b-kube-api-access-7qx45\") pod \"dnsmasq-dns-5b6dbdb6f5-6h6vr\" (UID: \"8b97be4b-4027-4f04-b290-eca32cde927b\") " pod="openstack/dnsmasq-dns-5b6dbdb6f5-6h6vr" Dec 13 03:29:41 crc kubenswrapper[5070]: I1213 03:29:41.354989 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5b6dbdb6f5-6h6vr" Dec 13 03:29:41 crc kubenswrapper[5070]: I1213 03:29:41.779409 5070 generic.go:334] "Generic (PLEG): container finished" podID="7b38e233-37d8-48ae-8e8e-56e97ec0962c" containerID="eb464549852bbc83dc78dc390598abeb11fa6b356fd094ad257ca2dd66bae968" exitCode=0 Dec 13 03:29:41 crc kubenswrapper[5070]: I1213 03:29:41.779719 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-d5b9-account-create-8mxbm" event={"ID":"7b38e233-37d8-48ae-8e8e-56e97ec0962c","Type":"ContainerDied","Data":"eb464549852bbc83dc78dc390598abeb11fa6b356fd094ad257ca2dd66bae968"} Dec 13 03:29:41 crc kubenswrapper[5070]: I1213 03:29:41.782321 5070 generic.go:334] "Generic (PLEG): container finished" podID="11bc40dc-12b2-46e3-ba04-85ff1fb6f8b5" containerID="f81f4c1d7b48763ec0adc33440d0506c7adc94587a7453141e397e59e4281c47" exitCode=0 Dec 13 03:29:41 crc kubenswrapper[5070]: I1213 03:29:41.782399 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-c1b6-account-create-gls4t" event={"ID":"11bc40dc-12b2-46e3-ba04-85ff1fb6f8b5","Type":"ContainerDied","Data":"f81f4c1d7b48763ec0adc33440d0506c7adc94587a7453141e397e59e4281c47"} Dec 13 03:29:41 crc kubenswrapper[5070]: I1213 03:29:41.782432 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-c1b6-account-create-gls4t" event={"ID":"11bc40dc-12b2-46e3-ba04-85ff1fb6f8b5","Type":"ContainerStarted","Data":"1beebefe4cf438b48d58e9f971d743b1d85fc991b07f2cbaaedb675c58b2c13e"} Dec 13 03:29:41 crc kubenswrapper[5070]: I1213 03:29:41.784047 5070 generic.go:334] "Generic (PLEG): container finished" podID="f4d4410c-3496-483f-9ce2-7e181589ada3" containerID="df723113fedaec855e5a76652062c5c6b1cd858ce93b298cf3e007fb42a9a0fb" exitCode=0 Dec 13 03:29:41 crc kubenswrapper[5070]: I1213 03:29:41.784096 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-1e18-account-create-f4zc5" event={"ID":"f4d4410c-3496-483f-9ce2-7e181589ada3","Type":"ContainerDied","Data":"df723113fedaec855e5a76652062c5c6b1cd858ce93b298cf3e007fb42a9a0fb"} Dec 13 03:29:41 crc kubenswrapper[5070]: I1213 03:29:41.784116 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-1e18-account-create-f4zc5" event={"ID":"f4d4410c-3496-483f-9ce2-7e181589ada3","Type":"ContainerStarted","Data":"3d752b849405f9710fc11b573a1fa3091c7d893949dbd875f180d331f43efe14"} Dec 13 03:29:41 crc kubenswrapper[5070]: I1213 03:29:41.786626 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-4v7hg" event={"ID":"36830906-e994-4294-8cef-75a40f39ede4","Type":"ContainerStarted","Data":"d964632d8a44a86f34225388cf8000e62afaa95a2549cd9d322698a1817e9987"} Dec 13 03:29:41 crc kubenswrapper[5070]: I1213 03:29:41.786653 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-4v7hg" event={"ID":"36830906-e994-4294-8cef-75a40f39ede4","Type":"ContainerStarted","Data":"23fcc08eef08a1ab3d5e09ce048b9a12c9eeef9c5c241311f16ef59826e38e9f"} Dec 13 03:29:41 crc kubenswrapper[5070]: I1213 03:29:41.865268 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5b6dbdb6f5-6h6vr"] Dec 13 03:29:41 crc kubenswrapper[5070]: I1213 03:29:41.896582 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-4v7hg" podStartSLOduration=2.896565095 podStartE2EDuration="2.896565095s" podCreationTimestamp="2025-12-13 03:29:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 
+0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 03:29:41.851710776 +0000 UTC m=+1074.087554322" watchObservedRunningTime="2025-12-13 03:29:41.896565095 +0000 UTC m=+1074.132408631" Dec 13 03:29:42 crc kubenswrapper[5070]: I1213 03:29:42.178338 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9368a062-8e24-48ec-9a22-c29f1c906361" path="/var/lib/kubelet/pods/9368a062-8e24-48ec-9a22-c29f1c906361/volumes" Dec 13 03:29:42 crc kubenswrapper[5070]: I1213 03:29:42.799983 5070 generic.go:334] "Generic (PLEG): container finished" podID="8b97be4b-4027-4f04-b290-eca32cde927b" containerID="03fde9ac04e5422bd3245307e9f6d5b5a302a7083e81227bc03af03a4020f056" exitCode=0 Dec 13 03:29:42 crc kubenswrapper[5070]: I1213 03:29:42.800265 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b6dbdb6f5-6h6vr" event={"ID":"8b97be4b-4027-4f04-b290-eca32cde927b","Type":"ContainerDied","Data":"03fde9ac04e5422bd3245307e9f6d5b5a302a7083e81227bc03af03a4020f056"} Dec 13 03:29:42 crc kubenswrapper[5070]: I1213 03:29:42.800295 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b6dbdb6f5-6h6vr" event={"ID":"8b97be4b-4027-4f04-b290-eca32cde927b","Type":"ContainerStarted","Data":"9f3a2a441614832f69930ca1d4599b9ca06bbb16daf7108961f055943a9168a2"} Dec 13 03:29:42 crc kubenswrapper[5070]: I1213 03:29:42.808827 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"62471d23-7b26-439a-9a26-e65abe4be2c1","Type":"ContainerStarted","Data":"33cfaac0d264e2b7383afbcae98b1f49c91329b9f2e464dbe3a06848b14d524b"} Dec 13 03:29:42 crc kubenswrapper[5070]: I1213 03:29:42.811006 5070 generic.go:334] "Generic (PLEG): container finished" podID="9abbaf32-db3e-4169-8159-6d707cdf3e2a" containerID="1368fd0d5b1bb1fd7edd55f9cc013293d39694fefbc7d2f11c297555ad6dee55" exitCode=0 Dec 13 03:29:42 crc kubenswrapper[5070]: I1213 03:29:42.811687 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-z9bn5" event={"ID":"9abbaf32-db3e-4169-8159-6d707cdf3e2a","Type":"ContainerDied","Data":"1368fd0d5b1bb1fd7edd55f9cc013293d39694fefbc7d2f11c297555ad6dee55"} Dec 13 03:29:43 crc kubenswrapper[5070]: I1213 03:29:43.368077 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-c1b6-account-create-gls4t" Dec 13 03:29:43 crc kubenswrapper[5070]: I1213 03:29:43.382201 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-d5b9-account-create-8mxbm" Dec 13 03:29:43 crc kubenswrapper[5070]: I1213 03:29:43.391351 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-1e18-account-create-f4zc5" Dec 13 03:29:43 crc kubenswrapper[5070]: I1213 03:29:43.557738 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mcfjf\" (UniqueName: \"kubernetes.io/projected/f4d4410c-3496-483f-9ce2-7e181589ada3-kube-api-access-mcfjf\") pod \"f4d4410c-3496-483f-9ce2-7e181589ada3\" (UID: \"f4d4410c-3496-483f-9ce2-7e181589ada3\") " Dec 13 03:29:43 crc kubenswrapper[5070]: I1213 03:29:43.558020 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nqlzz\" (UniqueName: \"kubernetes.io/projected/7b38e233-37d8-48ae-8e8e-56e97ec0962c-kube-api-access-nqlzz\") pod \"7b38e233-37d8-48ae-8e8e-56e97ec0962c\" (UID: \"7b38e233-37d8-48ae-8e8e-56e97ec0962c\") " Dec 13 03:29:43 crc kubenswrapper[5070]: I1213 03:29:43.558098 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5vjjz\" (UniqueName: \"kubernetes.io/projected/11bc40dc-12b2-46e3-ba04-85ff1fb6f8b5-kube-api-access-5vjjz\") pod \"11bc40dc-12b2-46e3-ba04-85ff1fb6f8b5\" (UID: \"11bc40dc-12b2-46e3-ba04-85ff1fb6f8b5\") " Dec 13 03:29:43 crc kubenswrapper[5070]: I1213 03:29:43.565965 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7b38e233-37d8-48ae-8e8e-56e97ec0962c-kube-api-access-nqlzz" (OuterVolumeSpecName: "kube-api-access-nqlzz") pod "7b38e233-37d8-48ae-8e8e-56e97ec0962c" (UID: "7b38e233-37d8-48ae-8e8e-56e97ec0962c"). InnerVolumeSpecName "kube-api-access-nqlzz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:29:43 crc kubenswrapper[5070]: I1213 03:29:43.568772 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f4d4410c-3496-483f-9ce2-7e181589ada3-kube-api-access-mcfjf" (OuterVolumeSpecName: "kube-api-access-mcfjf") pod "f4d4410c-3496-483f-9ce2-7e181589ada3" (UID: "f4d4410c-3496-483f-9ce2-7e181589ada3"). InnerVolumeSpecName "kube-api-access-mcfjf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:29:43 crc kubenswrapper[5070]: I1213 03:29:43.569932 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/11bc40dc-12b2-46e3-ba04-85ff1fb6f8b5-kube-api-access-5vjjz" (OuterVolumeSpecName: "kube-api-access-5vjjz") pod "11bc40dc-12b2-46e3-ba04-85ff1fb6f8b5" (UID: "11bc40dc-12b2-46e3-ba04-85ff1fb6f8b5"). InnerVolumeSpecName "kube-api-access-5vjjz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:29:43 crc kubenswrapper[5070]: I1213 03:29:43.659815 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mcfjf\" (UniqueName: \"kubernetes.io/projected/f4d4410c-3496-483f-9ce2-7e181589ada3-kube-api-access-mcfjf\") on node \"crc\" DevicePath \"\"" Dec 13 03:29:43 crc kubenswrapper[5070]: I1213 03:29:43.659849 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nqlzz\" (UniqueName: \"kubernetes.io/projected/7b38e233-37d8-48ae-8e8e-56e97ec0962c-kube-api-access-nqlzz\") on node \"crc\" DevicePath \"\"" Dec 13 03:29:43 crc kubenswrapper[5070]: I1213 03:29:43.659858 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5vjjz\" (UniqueName: \"kubernetes.io/projected/11bc40dc-12b2-46e3-ba04-85ff1fb6f8b5-kube-api-access-5vjjz\") on node \"crc\" DevicePath \"\"" Dec 13 03:29:43 crc kubenswrapper[5070]: I1213 03:29:43.820176 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b6dbdb6f5-6h6vr" event={"ID":"8b97be4b-4027-4f04-b290-eca32cde927b","Type":"ContainerStarted","Data":"7a6fef5ee68cc0dcb9d8e0e8fe72913249e8cb020691505d5822f10c8b21696f"} Dec 13 03:29:43 crc kubenswrapper[5070]: I1213 03:29:43.820361 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5b6dbdb6f5-6h6vr" Dec 13 03:29:43 crc kubenswrapper[5070]: I1213 03:29:43.823596 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-c1b6-account-create-gls4t" event={"ID":"11bc40dc-12b2-46e3-ba04-85ff1fb6f8b5","Type":"ContainerDied","Data":"1beebefe4cf438b48d58e9f971d743b1d85fc991b07f2cbaaedb675c58b2c13e"} Dec 13 03:29:43 crc kubenswrapper[5070]: I1213 03:29:43.823636 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-c1b6-account-create-gls4t" Dec 13 03:29:43 crc kubenswrapper[5070]: I1213 03:29:43.823646 5070 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1beebefe4cf438b48d58e9f971d743b1d85fc991b07f2cbaaedb675c58b2c13e" Dec 13 03:29:43 crc kubenswrapper[5070]: I1213 03:29:43.825710 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-1e18-account-create-f4zc5" event={"ID":"f4d4410c-3496-483f-9ce2-7e181589ada3","Type":"ContainerDied","Data":"3d752b849405f9710fc11b573a1fa3091c7d893949dbd875f180d331f43efe14"} Dec 13 03:29:43 crc kubenswrapper[5070]: I1213 03:29:43.825740 5070 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3d752b849405f9710fc11b573a1fa3091c7d893949dbd875f180d331f43efe14" Dec 13 03:29:43 crc kubenswrapper[5070]: I1213 03:29:43.825804 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-1e18-account-create-f4zc5" Dec 13 03:29:43 crc kubenswrapper[5070]: I1213 03:29:43.848753 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-d5b9-account-create-8mxbm" Dec 13 03:29:43 crc kubenswrapper[5070]: I1213 03:29:43.851570 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-d5b9-account-create-8mxbm" event={"ID":"7b38e233-37d8-48ae-8e8e-56e97ec0962c","Type":"ContainerDied","Data":"93cdddb019de1bcd7127d3ee50e433599cd4c8c59ef18f14862440a38405f8cd"} Dec 13 03:29:43 crc kubenswrapper[5070]: I1213 03:29:43.851618 5070 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="93cdddb019de1bcd7127d3ee50e433599cd4c8c59ef18f14862440a38405f8cd" Dec 13 03:29:43 crc kubenswrapper[5070]: I1213 03:29:43.859082 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5b6dbdb6f5-6h6vr" podStartSLOduration=3.859060008 podStartE2EDuration="3.859060008s" podCreationTimestamp="2025-12-13 03:29:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 03:29:43.847935425 +0000 UTC m=+1076.083778971" watchObservedRunningTime="2025-12-13 03:29:43.859060008 +0000 UTC m=+1076.094903554" Dec 13 03:29:44 crc kubenswrapper[5070]: I1213 03:29:44.472756 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-z9bn5" Dec 13 03:29:44 crc kubenswrapper[5070]: I1213 03:29:44.529063 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9abbaf32-db3e-4169-8159-6d707cdf3e2a-scripts\") pod \"9abbaf32-db3e-4169-8159-6d707cdf3e2a\" (UID: \"9abbaf32-db3e-4169-8159-6d707cdf3e2a\") " Dec 13 03:29:44 crc kubenswrapper[5070]: I1213 03:29:44.529140 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9abbaf32-db3e-4169-8159-6d707cdf3e2a-config-data\") pod \"9abbaf32-db3e-4169-8159-6d707cdf3e2a\" (UID: \"9abbaf32-db3e-4169-8159-6d707cdf3e2a\") " Dec 13 03:29:44 crc kubenswrapper[5070]: I1213 03:29:44.529272 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9abbaf32-db3e-4169-8159-6d707cdf3e2a-combined-ca-bundle\") pod \"9abbaf32-db3e-4169-8159-6d707cdf3e2a\" (UID: \"9abbaf32-db3e-4169-8159-6d707cdf3e2a\") " Dec 13 03:29:44 crc kubenswrapper[5070]: I1213 03:29:44.529311 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9abbaf32-db3e-4169-8159-6d707cdf3e2a-logs\") pod \"9abbaf32-db3e-4169-8159-6d707cdf3e2a\" (UID: \"9abbaf32-db3e-4169-8159-6d707cdf3e2a\") " Dec 13 03:29:44 crc kubenswrapper[5070]: I1213 03:29:44.529371 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vzd4x\" (UniqueName: \"kubernetes.io/projected/9abbaf32-db3e-4169-8159-6d707cdf3e2a-kube-api-access-vzd4x\") pod \"9abbaf32-db3e-4169-8159-6d707cdf3e2a\" (UID: \"9abbaf32-db3e-4169-8159-6d707cdf3e2a\") " Dec 13 03:29:44 crc kubenswrapper[5070]: I1213 03:29:44.533921 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9abbaf32-db3e-4169-8159-6d707cdf3e2a-logs" (OuterVolumeSpecName: "logs") pod "9abbaf32-db3e-4169-8159-6d707cdf3e2a" (UID: "9abbaf32-db3e-4169-8159-6d707cdf3e2a"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 03:29:44 crc kubenswrapper[5070]: I1213 03:29:44.534561 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9abbaf32-db3e-4169-8159-6d707cdf3e2a-kube-api-access-vzd4x" (OuterVolumeSpecName: "kube-api-access-vzd4x") pod "9abbaf32-db3e-4169-8159-6d707cdf3e2a" (UID: "9abbaf32-db3e-4169-8159-6d707cdf3e2a"). InnerVolumeSpecName "kube-api-access-vzd4x". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:29:44 crc kubenswrapper[5070]: I1213 03:29:44.537084 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9abbaf32-db3e-4169-8159-6d707cdf3e2a-scripts" (OuterVolumeSpecName: "scripts") pod "9abbaf32-db3e-4169-8159-6d707cdf3e2a" (UID: "9abbaf32-db3e-4169-8159-6d707cdf3e2a"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:29:44 crc kubenswrapper[5070]: I1213 03:29:44.595265 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9abbaf32-db3e-4169-8159-6d707cdf3e2a-config-data" (OuterVolumeSpecName: "config-data") pod "9abbaf32-db3e-4169-8159-6d707cdf3e2a" (UID: "9abbaf32-db3e-4169-8159-6d707cdf3e2a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:29:44 crc kubenswrapper[5070]: I1213 03:29:44.598433 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9abbaf32-db3e-4169-8159-6d707cdf3e2a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9abbaf32-db3e-4169-8159-6d707cdf3e2a" (UID: "9abbaf32-db3e-4169-8159-6d707cdf3e2a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:29:44 crc kubenswrapper[5070]: I1213 03:29:44.631739 5070 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9abbaf32-db3e-4169-8159-6d707cdf3e2a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 03:29:44 crc kubenswrapper[5070]: I1213 03:29:44.631952 5070 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9abbaf32-db3e-4169-8159-6d707cdf3e2a-logs\") on node \"crc\" DevicePath \"\"" Dec 13 03:29:44 crc kubenswrapper[5070]: I1213 03:29:44.631969 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vzd4x\" (UniqueName: \"kubernetes.io/projected/9abbaf32-db3e-4169-8159-6d707cdf3e2a-kube-api-access-vzd4x\") on node \"crc\" DevicePath \"\"" Dec 13 03:29:44 crc kubenswrapper[5070]: I1213 03:29:44.631980 5070 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9abbaf32-db3e-4169-8159-6d707cdf3e2a-scripts\") on node \"crc\" DevicePath \"\"" Dec 13 03:29:44 crc kubenswrapper[5070]: I1213 03:29:44.631988 5070 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9abbaf32-db3e-4169-8159-6d707cdf3e2a-config-data\") on node \"crc\" DevicePath \"\"" Dec 13 03:29:44 crc kubenswrapper[5070]: I1213 03:29:44.861303 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-z9bn5" Dec 13 03:29:44 crc kubenswrapper[5070]: I1213 03:29:44.861428 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-z9bn5" event={"ID":"9abbaf32-db3e-4169-8159-6d707cdf3e2a","Type":"ContainerDied","Data":"4ad6081c77a0327e0b1639adad1a127aae85d1598506dd59182a6e3b3c1488ff"} Dec 13 03:29:44 crc kubenswrapper[5070]: I1213 03:29:44.862592 5070 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4ad6081c77a0327e0b1639adad1a127aae85d1598506dd59182a6e3b3c1488ff" Dec 13 03:29:44 crc kubenswrapper[5070]: I1213 03:29:44.955978 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-6ff9d66794-qds4v"] Dec 13 03:29:44 crc kubenswrapper[5070]: E1213 03:29:44.956552 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="11bc40dc-12b2-46e3-ba04-85ff1fb6f8b5" containerName="mariadb-account-create" Dec 13 03:29:44 crc kubenswrapper[5070]: I1213 03:29:44.956577 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="11bc40dc-12b2-46e3-ba04-85ff1fb6f8b5" containerName="mariadb-account-create" Dec 13 03:29:44 crc kubenswrapper[5070]: E1213 03:29:44.956612 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7b38e233-37d8-48ae-8e8e-56e97ec0962c" containerName="mariadb-account-create" Dec 13 03:29:44 crc kubenswrapper[5070]: I1213 03:29:44.956620 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="7b38e233-37d8-48ae-8e8e-56e97ec0962c" containerName="mariadb-account-create" Dec 13 03:29:44 crc kubenswrapper[5070]: E1213 03:29:44.956637 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4d4410c-3496-483f-9ce2-7e181589ada3" containerName="mariadb-account-create" Dec 13 03:29:44 crc kubenswrapper[5070]: I1213 03:29:44.956645 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4d4410c-3496-483f-9ce2-7e181589ada3" containerName="mariadb-account-create" Dec 13 03:29:44 crc kubenswrapper[5070]: E1213 03:29:44.956660 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9abbaf32-db3e-4169-8159-6d707cdf3e2a" containerName="placement-db-sync" Dec 13 03:29:44 crc kubenswrapper[5070]: I1213 03:29:44.956667 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="9abbaf32-db3e-4169-8159-6d707cdf3e2a" containerName="placement-db-sync" Dec 13 03:29:44 crc kubenswrapper[5070]: I1213 03:29:44.957049 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="7b38e233-37d8-48ae-8e8e-56e97ec0962c" containerName="mariadb-account-create" Dec 13 03:29:44 crc kubenswrapper[5070]: I1213 03:29:44.957073 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="9abbaf32-db3e-4169-8159-6d707cdf3e2a" containerName="placement-db-sync" Dec 13 03:29:44 crc kubenswrapper[5070]: I1213 03:29:44.957088 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="11bc40dc-12b2-46e3-ba04-85ff1fb6f8b5" containerName="mariadb-account-create" Dec 13 03:29:44 crc kubenswrapper[5070]: I1213 03:29:44.957099 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4d4410c-3496-483f-9ce2-7e181589ada3" containerName="mariadb-account-create" Dec 13 03:29:44 crc kubenswrapper[5070]: I1213 03:29:44.959507 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-6ff9d66794-qds4v" Dec 13 03:29:44 crc kubenswrapper[5070]: I1213 03:29:44.974009 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-internal-svc" Dec 13 03:29:44 crc kubenswrapper[5070]: I1213 03:29:44.975075 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-public-svc" Dec 13 03:29:44 crc kubenswrapper[5070]: I1213 03:29:44.976577 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Dec 13 03:29:44 crc kubenswrapper[5070]: I1213 03:29:44.980989 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Dec 13 03:29:44 crc kubenswrapper[5070]: I1213 03:29:44.980994 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-74r9b" Dec 13 03:29:44 crc kubenswrapper[5070]: I1213 03:29:44.988554 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-6ff9d66794-qds4v"] Dec 13 03:29:45 crc kubenswrapper[5070]: I1213 03:29:45.041474 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9b374c77-6272-4397-b17e-c0bc4b8e3803-public-tls-certs\") pod \"placement-6ff9d66794-qds4v\" (UID: \"9b374c77-6272-4397-b17e-c0bc4b8e3803\") " pod="openstack/placement-6ff9d66794-qds4v" Dec 13 03:29:45 crc kubenswrapper[5070]: I1213 03:29:45.041560 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9b374c77-6272-4397-b17e-c0bc4b8e3803-internal-tls-certs\") pod \"placement-6ff9d66794-qds4v\" (UID: \"9b374c77-6272-4397-b17e-c0bc4b8e3803\") " pod="openstack/placement-6ff9d66794-qds4v" Dec 13 03:29:45 crc kubenswrapper[5070]: I1213 03:29:45.041602 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9b374c77-6272-4397-b17e-c0bc4b8e3803-logs\") pod \"placement-6ff9d66794-qds4v\" (UID: \"9b374c77-6272-4397-b17e-c0bc4b8e3803\") " pod="openstack/placement-6ff9d66794-qds4v" Dec 13 03:29:45 crc kubenswrapper[5070]: I1213 03:29:45.041647 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vd2xr\" (UniqueName: \"kubernetes.io/projected/9b374c77-6272-4397-b17e-c0bc4b8e3803-kube-api-access-vd2xr\") pod \"placement-6ff9d66794-qds4v\" (UID: \"9b374c77-6272-4397-b17e-c0bc4b8e3803\") " pod="openstack/placement-6ff9d66794-qds4v" Dec 13 03:29:45 crc kubenswrapper[5070]: I1213 03:29:45.041667 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9b374c77-6272-4397-b17e-c0bc4b8e3803-combined-ca-bundle\") pod \"placement-6ff9d66794-qds4v\" (UID: \"9b374c77-6272-4397-b17e-c0bc4b8e3803\") " pod="openstack/placement-6ff9d66794-qds4v" Dec 13 03:29:45 crc kubenswrapper[5070]: I1213 03:29:45.041702 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9b374c77-6272-4397-b17e-c0bc4b8e3803-scripts\") pod \"placement-6ff9d66794-qds4v\" (UID: \"9b374c77-6272-4397-b17e-c0bc4b8e3803\") " pod="openstack/placement-6ff9d66794-qds4v" Dec 13 03:29:45 crc kubenswrapper[5070]: I1213 03:29:45.041723 
5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9b374c77-6272-4397-b17e-c0bc4b8e3803-config-data\") pod \"placement-6ff9d66794-qds4v\" (UID: \"9b374c77-6272-4397-b17e-c0bc4b8e3803\") " pod="openstack/placement-6ff9d66794-qds4v" Dec 13 03:29:45 crc kubenswrapper[5070]: I1213 03:29:45.143317 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9b374c77-6272-4397-b17e-c0bc4b8e3803-scripts\") pod \"placement-6ff9d66794-qds4v\" (UID: \"9b374c77-6272-4397-b17e-c0bc4b8e3803\") " pod="openstack/placement-6ff9d66794-qds4v" Dec 13 03:29:45 crc kubenswrapper[5070]: I1213 03:29:45.143370 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9b374c77-6272-4397-b17e-c0bc4b8e3803-config-data\") pod \"placement-6ff9d66794-qds4v\" (UID: \"9b374c77-6272-4397-b17e-c0bc4b8e3803\") " pod="openstack/placement-6ff9d66794-qds4v" Dec 13 03:29:45 crc kubenswrapper[5070]: I1213 03:29:45.143417 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9b374c77-6272-4397-b17e-c0bc4b8e3803-public-tls-certs\") pod \"placement-6ff9d66794-qds4v\" (UID: \"9b374c77-6272-4397-b17e-c0bc4b8e3803\") " pod="openstack/placement-6ff9d66794-qds4v" Dec 13 03:29:45 crc kubenswrapper[5070]: I1213 03:29:45.143473 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9b374c77-6272-4397-b17e-c0bc4b8e3803-internal-tls-certs\") pod \"placement-6ff9d66794-qds4v\" (UID: \"9b374c77-6272-4397-b17e-c0bc4b8e3803\") " pod="openstack/placement-6ff9d66794-qds4v" Dec 13 03:29:45 crc kubenswrapper[5070]: I1213 03:29:45.143505 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9b374c77-6272-4397-b17e-c0bc4b8e3803-logs\") pod \"placement-6ff9d66794-qds4v\" (UID: \"9b374c77-6272-4397-b17e-c0bc4b8e3803\") " pod="openstack/placement-6ff9d66794-qds4v" Dec 13 03:29:45 crc kubenswrapper[5070]: I1213 03:29:45.143532 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vd2xr\" (UniqueName: \"kubernetes.io/projected/9b374c77-6272-4397-b17e-c0bc4b8e3803-kube-api-access-vd2xr\") pod \"placement-6ff9d66794-qds4v\" (UID: \"9b374c77-6272-4397-b17e-c0bc4b8e3803\") " pod="openstack/placement-6ff9d66794-qds4v" Dec 13 03:29:45 crc kubenswrapper[5070]: I1213 03:29:45.143551 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9b374c77-6272-4397-b17e-c0bc4b8e3803-combined-ca-bundle\") pod \"placement-6ff9d66794-qds4v\" (UID: \"9b374c77-6272-4397-b17e-c0bc4b8e3803\") " pod="openstack/placement-6ff9d66794-qds4v" Dec 13 03:29:45 crc kubenswrapper[5070]: I1213 03:29:45.144476 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9b374c77-6272-4397-b17e-c0bc4b8e3803-logs\") pod \"placement-6ff9d66794-qds4v\" (UID: \"9b374c77-6272-4397-b17e-c0bc4b8e3803\") " pod="openstack/placement-6ff9d66794-qds4v" Dec 13 03:29:45 crc kubenswrapper[5070]: I1213 03:29:45.152085 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/9b374c77-6272-4397-b17e-c0bc4b8e3803-combined-ca-bundle\") pod \"placement-6ff9d66794-qds4v\" (UID: \"9b374c77-6272-4397-b17e-c0bc4b8e3803\") " pod="openstack/placement-6ff9d66794-qds4v" Dec 13 03:29:45 crc kubenswrapper[5070]: I1213 03:29:45.158326 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9b374c77-6272-4397-b17e-c0bc4b8e3803-scripts\") pod \"placement-6ff9d66794-qds4v\" (UID: \"9b374c77-6272-4397-b17e-c0bc4b8e3803\") " pod="openstack/placement-6ff9d66794-qds4v" Dec 13 03:29:45 crc kubenswrapper[5070]: I1213 03:29:45.158661 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9b374c77-6272-4397-b17e-c0bc4b8e3803-internal-tls-certs\") pod \"placement-6ff9d66794-qds4v\" (UID: \"9b374c77-6272-4397-b17e-c0bc4b8e3803\") " pod="openstack/placement-6ff9d66794-qds4v" Dec 13 03:29:45 crc kubenswrapper[5070]: I1213 03:29:45.158909 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9b374c77-6272-4397-b17e-c0bc4b8e3803-config-data\") pod \"placement-6ff9d66794-qds4v\" (UID: \"9b374c77-6272-4397-b17e-c0bc4b8e3803\") " pod="openstack/placement-6ff9d66794-qds4v" Dec 13 03:29:45 crc kubenswrapper[5070]: I1213 03:29:45.159223 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9b374c77-6272-4397-b17e-c0bc4b8e3803-public-tls-certs\") pod \"placement-6ff9d66794-qds4v\" (UID: \"9b374c77-6272-4397-b17e-c0bc4b8e3803\") " pod="openstack/placement-6ff9d66794-qds4v" Dec 13 03:29:45 crc kubenswrapper[5070]: I1213 03:29:45.162515 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vd2xr\" (UniqueName: \"kubernetes.io/projected/9b374c77-6272-4397-b17e-c0bc4b8e3803-kube-api-access-vd2xr\") pod \"placement-6ff9d66794-qds4v\" (UID: \"9b374c77-6272-4397-b17e-c0bc4b8e3803\") " pod="openstack/placement-6ff9d66794-qds4v" Dec 13 03:29:45 crc kubenswrapper[5070]: I1213 03:29:45.305629 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-6ff9d66794-qds4v" Dec 13 03:29:45 crc kubenswrapper[5070]: I1213 03:29:45.742372 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-6ff9d66794-qds4v"] Dec 13 03:29:45 crc kubenswrapper[5070]: W1213 03:29:45.769629 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9b374c77_6272_4397_b17e_c0bc4b8e3803.slice/crio-4e2413d0fff11f7b977fb48ea09dea34a4fa7343c47201065905d9b57526d88a WatchSource:0}: Error finding container 4e2413d0fff11f7b977fb48ea09dea34a4fa7343c47201065905d9b57526d88a: Status 404 returned error can't find the container with id 4e2413d0fff11f7b977fb48ea09dea34a4fa7343c47201065905d9b57526d88a Dec 13 03:29:45 crc kubenswrapper[5070]: I1213 03:29:45.894341 5070 generic.go:334] "Generic (PLEG): container finished" podID="36830906-e994-4294-8cef-75a40f39ede4" containerID="d964632d8a44a86f34225388cf8000e62afaa95a2549cd9d322698a1817e9987" exitCode=0 Dec 13 03:29:45 crc kubenswrapper[5070]: I1213 03:29:45.894618 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-4v7hg" event={"ID":"36830906-e994-4294-8cef-75a40f39ede4","Type":"ContainerDied","Data":"d964632d8a44a86f34225388cf8000e62afaa95a2549cd9d322698a1817e9987"} Dec 13 03:29:45 crc kubenswrapper[5070]: I1213 03:29:45.897614 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-6ff9d66794-qds4v" event={"ID":"9b374c77-6272-4397-b17e-c0bc4b8e3803","Type":"ContainerStarted","Data":"4e2413d0fff11f7b977fb48ea09dea34a4fa7343c47201065905d9b57526d88a"} Dec 13 03:29:46 crc kubenswrapper[5070]: I1213 03:29:46.923343 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-6ff9d66794-qds4v" event={"ID":"9b374c77-6272-4397-b17e-c0bc4b8e3803","Type":"ContainerStarted","Data":"e6daa50369fa4616c58e70aed30208c3138c12afb0498f2fc9b9ae526d9a9c19"} Dec 13 03:29:46 crc kubenswrapper[5070]: I1213 03:29:46.941504 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-sync-csqcx"] Dec 13 03:29:46 crc kubenswrapper[5070]: I1213 03:29:46.942620 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-csqcx" Dec 13 03:29:46 crc kubenswrapper[5070]: I1213 03:29:46.944567 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Dec 13 03:29:46 crc kubenswrapper[5070]: I1213 03:29:46.946754 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-ff62v" Dec 13 03:29:46 crc kubenswrapper[5070]: I1213 03:29:46.952109 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-csqcx"] Dec 13 03:29:47 crc kubenswrapper[5070]: I1213 03:29:47.000419 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/759f7e37-a2f6-4a1b-a220-24397c94b928-db-sync-config-data\") pod \"barbican-db-sync-csqcx\" (UID: \"759f7e37-a2f6-4a1b-a220-24397c94b928\") " pod="openstack/barbican-db-sync-csqcx" Dec 13 03:29:47 crc kubenswrapper[5070]: I1213 03:29:47.000475 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rhskf\" (UniqueName: \"kubernetes.io/projected/759f7e37-a2f6-4a1b-a220-24397c94b928-kube-api-access-rhskf\") pod \"barbican-db-sync-csqcx\" (UID: \"759f7e37-a2f6-4a1b-a220-24397c94b928\") " pod="openstack/barbican-db-sync-csqcx" Dec 13 03:29:47 crc kubenswrapper[5070]: I1213 03:29:47.000503 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/759f7e37-a2f6-4a1b-a220-24397c94b928-combined-ca-bundle\") pod \"barbican-db-sync-csqcx\" (UID: \"759f7e37-a2f6-4a1b-a220-24397c94b928\") " pod="openstack/barbican-db-sync-csqcx" Dec 13 03:29:47 crc kubenswrapper[5070]: I1213 03:29:47.102632 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/759f7e37-a2f6-4a1b-a220-24397c94b928-db-sync-config-data\") pod \"barbican-db-sync-csqcx\" (UID: \"759f7e37-a2f6-4a1b-a220-24397c94b928\") " pod="openstack/barbican-db-sync-csqcx" Dec 13 03:29:47 crc kubenswrapper[5070]: I1213 03:29:47.102682 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rhskf\" (UniqueName: \"kubernetes.io/projected/759f7e37-a2f6-4a1b-a220-24397c94b928-kube-api-access-rhskf\") pod \"barbican-db-sync-csqcx\" (UID: \"759f7e37-a2f6-4a1b-a220-24397c94b928\") " pod="openstack/barbican-db-sync-csqcx" Dec 13 03:29:47 crc kubenswrapper[5070]: I1213 03:29:47.102707 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/759f7e37-a2f6-4a1b-a220-24397c94b928-combined-ca-bundle\") pod \"barbican-db-sync-csqcx\" (UID: \"759f7e37-a2f6-4a1b-a220-24397c94b928\") " pod="openstack/barbican-db-sync-csqcx" Dec 13 03:29:47 crc kubenswrapper[5070]: I1213 03:29:47.109349 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/759f7e37-a2f6-4a1b-a220-24397c94b928-db-sync-config-data\") pod \"barbican-db-sync-csqcx\" (UID: \"759f7e37-a2f6-4a1b-a220-24397c94b928\") " pod="openstack/barbican-db-sync-csqcx" Dec 13 03:29:47 crc kubenswrapper[5070]: I1213 03:29:47.132637 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/759f7e37-a2f6-4a1b-a220-24397c94b928-combined-ca-bundle\") pod \"barbican-db-sync-csqcx\" (UID: \"759f7e37-a2f6-4a1b-a220-24397c94b928\") " pod="openstack/barbican-db-sync-csqcx" Dec 13 03:29:47 crc kubenswrapper[5070]: I1213 03:29:47.144493 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rhskf\" (UniqueName: \"kubernetes.io/projected/759f7e37-a2f6-4a1b-a220-24397c94b928-kube-api-access-rhskf\") pod \"barbican-db-sync-csqcx\" (UID: \"759f7e37-a2f6-4a1b-a220-24397c94b928\") " pod="openstack/barbican-db-sync-csqcx" Dec 13 03:29:47 crc kubenswrapper[5070]: I1213 03:29:47.190217 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-sync-n7ll5"] Dec 13 03:29:47 crc kubenswrapper[5070]: I1213 03:29:47.202968 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-n7ll5" Dec 13 03:29:47 crc kubenswrapper[5070]: I1213 03:29:47.216316 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-n7ll5"] Dec 13 03:29:47 crc kubenswrapper[5070]: I1213 03:29:47.217274 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Dec 13 03:29:47 crc kubenswrapper[5070]: I1213 03:29:47.217706 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-2xw4r" Dec 13 03:29:47 crc kubenswrapper[5070]: I1213 03:29:47.217968 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Dec 13 03:29:47 crc kubenswrapper[5070]: I1213 03:29:47.280550 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-csqcx" Dec 13 03:29:47 crc kubenswrapper[5070]: I1213 03:29:47.308842 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64c718c6-de73-4e08-9506-dce5dc9ebffd-combined-ca-bundle\") pod \"cinder-db-sync-n7ll5\" (UID: \"64c718c6-de73-4e08-9506-dce5dc9ebffd\") " pod="openstack/cinder-db-sync-n7ll5" Dec 13 03:29:47 crc kubenswrapper[5070]: I1213 03:29:47.308911 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/64c718c6-de73-4e08-9506-dce5dc9ebffd-scripts\") pod \"cinder-db-sync-n7ll5\" (UID: \"64c718c6-de73-4e08-9506-dce5dc9ebffd\") " pod="openstack/cinder-db-sync-n7ll5" Dec 13 03:29:47 crc kubenswrapper[5070]: I1213 03:29:47.308958 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/64c718c6-de73-4e08-9506-dce5dc9ebffd-etc-machine-id\") pod \"cinder-db-sync-n7ll5\" (UID: \"64c718c6-de73-4e08-9506-dce5dc9ebffd\") " pod="openstack/cinder-db-sync-n7ll5" Dec 13 03:29:47 crc kubenswrapper[5070]: I1213 03:29:47.309101 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/64c718c6-de73-4e08-9506-dce5dc9ebffd-config-data\") pod \"cinder-db-sync-n7ll5\" (UID: \"64c718c6-de73-4e08-9506-dce5dc9ebffd\") " pod="openstack/cinder-db-sync-n7ll5" Dec 13 03:29:47 crc kubenswrapper[5070]: I1213 03:29:47.309126 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9r6jl\" (UniqueName: 
\"kubernetes.io/projected/64c718c6-de73-4e08-9506-dce5dc9ebffd-kube-api-access-9r6jl\") pod \"cinder-db-sync-n7ll5\" (UID: \"64c718c6-de73-4e08-9506-dce5dc9ebffd\") " pod="openstack/cinder-db-sync-n7ll5" Dec 13 03:29:47 crc kubenswrapper[5070]: I1213 03:29:47.309148 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/64c718c6-de73-4e08-9506-dce5dc9ebffd-db-sync-config-data\") pod \"cinder-db-sync-n7ll5\" (UID: \"64c718c6-de73-4e08-9506-dce5dc9ebffd\") " pod="openstack/cinder-db-sync-n7ll5" Dec 13 03:29:47 crc kubenswrapper[5070]: I1213 03:29:47.349271 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-sync-vtvhk"] Dec 13 03:29:47 crc kubenswrapper[5070]: I1213 03:29:47.350695 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-vtvhk" Dec 13 03:29:47 crc kubenswrapper[5070]: I1213 03:29:47.353307 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-vsg2q" Dec 13 03:29:47 crc kubenswrapper[5070]: I1213 03:29:47.353736 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Dec 13 03:29:47 crc kubenswrapper[5070]: I1213 03:29:47.353817 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Dec 13 03:29:47 crc kubenswrapper[5070]: I1213 03:29:47.357199 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-vtvhk"] Dec 13 03:29:47 crc kubenswrapper[5070]: I1213 03:29:47.410845 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64c718c6-de73-4e08-9506-dce5dc9ebffd-combined-ca-bundle\") pod \"cinder-db-sync-n7ll5\" (UID: \"64c718c6-de73-4e08-9506-dce5dc9ebffd\") " pod="openstack/cinder-db-sync-n7ll5" Dec 13 03:29:47 crc kubenswrapper[5070]: I1213 03:29:47.410899 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/64c718c6-de73-4e08-9506-dce5dc9ebffd-scripts\") pod \"cinder-db-sync-n7ll5\" (UID: \"64c718c6-de73-4e08-9506-dce5dc9ebffd\") " pod="openstack/cinder-db-sync-n7ll5" Dec 13 03:29:47 crc kubenswrapper[5070]: I1213 03:29:47.410934 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/64c718c6-de73-4e08-9506-dce5dc9ebffd-etc-machine-id\") pod \"cinder-db-sync-n7ll5\" (UID: \"64c718c6-de73-4e08-9506-dce5dc9ebffd\") " pod="openstack/cinder-db-sync-n7ll5" Dec 13 03:29:47 crc kubenswrapper[5070]: I1213 03:29:47.410987 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/e3c77f64-0733-4569-b3d9-f112d9e1d02b-config\") pod \"neutron-db-sync-vtvhk\" (UID: \"e3c77f64-0733-4569-b3d9-f112d9e1d02b\") " pod="openstack/neutron-db-sync-vtvhk" Dec 13 03:29:47 crc kubenswrapper[5070]: I1213 03:29:47.411062 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/64c718c6-de73-4e08-9506-dce5dc9ebffd-config-data\") pod \"cinder-db-sync-n7ll5\" (UID: \"64c718c6-de73-4e08-9506-dce5dc9ebffd\") " pod="openstack/cinder-db-sync-n7ll5" Dec 13 03:29:47 crc kubenswrapper[5070]: I1213 03:29:47.411080 5070 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-9r6jl\" (UniqueName: \"kubernetes.io/projected/64c718c6-de73-4e08-9506-dce5dc9ebffd-kube-api-access-9r6jl\") pod \"cinder-db-sync-n7ll5\" (UID: \"64c718c6-de73-4e08-9506-dce5dc9ebffd\") " pod="openstack/cinder-db-sync-n7ll5" Dec 13 03:29:47 crc kubenswrapper[5070]: I1213 03:29:47.411095 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/64c718c6-de73-4e08-9506-dce5dc9ebffd-db-sync-config-data\") pod \"cinder-db-sync-n7ll5\" (UID: \"64c718c6-de73-4e08-9506-dce5dc9ebffd\") " pod="openstack/cinder-db-sync-n7ll5" Dec 13 03:29:47 crc kubenswrapper[5070]: I1213 03:29:47.411119 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e3c77f64-0733-4569-b3d9-f112d9e1d02b-combined-ca-bundle\") pod \"neutron-db-sync-vtvhk\" (UID: \"e3c77f64-0733-4569-b3d9-f112d9e1d02b\") " pod="openstack/neutron-db-sync-vtvhk" Dec 13 03:29:47 crc kubenswrapper[5070]: I1213 03:29:47.411136 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qwlr5\" (UniqueName: \"kubernetes.io/projected/e3c77f64-0733-4569-b3d9-f112d9e1d02b-kube-api-access-qwlr5\") pod \"neutron-db-sync-vtvhk\" (UID: \"e3c77f64-0733-4569-b3d9-f112d9e1d02b\") " pod="openstack/neutron-db-sync-vtvhk" Dec 13 03:29:47 crc kubenswrapper[5070]: I1213 03:29:47.411921 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/64c718c6-de73-4e08-9506-dce5dc9ebffd-etc-machine-id\") pod \"cinder-db-sync-n7ll5\" (UID: \"64c718c6-de73-4e08-9506-dce5dc9ebffd\") " pod="openstack/cinder-db-sync-n7ll5" Dec 13 03:29:47 crc kubenswrapper[5070]: I1213 03:29:47.416285 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/64c718c6-de73-4e08-9506-dce5dc9ebffd-config-data\") pod \"cinder-db-sync-n7ll5\" (UID: \"64c718c6-de73-4e08-9506-dce5dc9ebffd\") " pod="openstack/cinder-db-sync-n7ll5" Dec 13 03:29:47 crc kubenswrapper[5070]: I1213 03:29:47.433573 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/64c718c6-de73-4e08-9506-dce5dc9ebffd-db-sync-config-data\") pod \"cinder-db-sync-n7ll5\" (UID: \"64c718c6-de73-4e08-9506-dce5dc9ebffd\") " pod="openstack/cinder-db-sync-n7ll5" Dec 13 03:29:47 crc kubenswrapper[5070]: I1213 03:29:47.434556 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/64c718c6-de73-4e08-9506-dce5dc9ebffd-scripts\") pod \"cinder-db-sync-n7ll5\" (UID: \"64c718c6-de73-4e08-9506-dce5dc9ebffd\") " pod="openstack/cinder-db-sync-n7ll5" Dec 13 03:29:47 crc kubenswrapper[5070]: I1213 03:29:47.435352 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64c718c6-de73-4e08-9506-dce5dc9ebffd-combined-ca-bundle\") pod \"cinder-db-sync-n7ll5\" (UID: \"64c718c6-de73-4e08-9506-dce5dc9ebffd\") " pod="openstack/cinder-db-sync-n7ll5" Dec 13 03:29:47 crc kubenswrapper[5070]: I1213 03:29:47.435785 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9r6jl\" (UniqueName: \"kubernetes.io/projected/64c718c6-de73-4e08-9506-dce5dc9ebffd-kube-api-access-9r6jl\") 
pod \"cinder-db-sync-n7ll5\" (UID: \"64c718c6-de73-4e08-9506-dce5dc9ebffd\") " pod="openstack/cinder-db-sync-n7ll5" Dec 13 03:29:47 crc kubenswrapper[5070]: I1213 03:29:47.513108 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/e3c77f64-0733-4569-b3d9-f112d9e1d02b-config\") pod \"neutron-db-sync-vtvhk\" (UID: \"e3c77f64-0733-4569-b3d9-f112d9e1d02b\") " pod="openstack/neutron-db-sync-vtvhk" Dec 13 03:29:47 crc kubenswrapper[5070]: I1213 03:29:47.513228 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e3c77f64-0733-4569-b3d9-f112d9e1d02b-combined-ca-bundle\") pod \"neutron-db-sync-vtvhk\" (UID: \"e3c77f64-0733-4569-b3d9-f112d9e1d02b\") " pod="openstack/neutron-db-sync-vtvhk" Dec 13 03:29:47 crc kubenswrapper[5070]: I1213 03:29:47.513258 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qwlr5\" (UniqueName: \"kubernetes.io/projected/e3c77f64-0733-4569-b3d9-f112d9e1d02b-kube-api-access-qwlr5\") pod \"neutron-db-sync-vtvhk\" (UID: \"e3c77f64-0733-4569-b3d9-f112d9e1d02b\") " pod="openstack/neutron-db-sync-vtvhk" Dec 13 03:29:47 crc kubenswrapper[5070]: I1213 03:29:47.519585 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/e3c77f64-0733-4569-b3d9-f112d9e1d02b-config\") pod \"neutron-db-sync-vtvhk\" (UID: \"e3c77f64-0733-4569-b3d9-f112d9e1d02b\") " pod="openstack/neutron-db-sync-vtvhk" Dec 13 03:29:47 crc kubenswrapper[5070]: I1213 03:29:47.519982 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e3c77f64-0733-4569-b3d9-f112d9e1d02b-combined-ca-bundle\") pod \"neutron-db-sync-vtvhk\" (UID: \"e3c77f64-0733-4569-b3d9-f112d9e1d02b\") " pod="openstack/neutron-db-sync-vtvhk" Dec 13 03:29:47 crc kubenswrapper[5070]: I1213 03:29:47.531668 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qwlr5\" (UniqueName: \"kubernetes.io/projected/e3c77f64-0733-4569-b3d9-f112d9e1d02b-kube-api-access-qwlr5\") pod \"neutron-db-sync-vtvhk\" (UID: \"e3c77f64-0733-4569-b3d9-f112d9e1d02b\") " pod="openstack/neutron-db-sync-vtvhk" Dec 13 03:29:47 crc kubenswrapper[5070]: I1213 03:29:47.576812 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-n7ll5" Dec 13 03:29:47 crc kubenswrapper[5070]: I1213 03:29:47.677029 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-vtvhk" Dec 13 03:29:51 crc kubenswrapper[5070]: I1213 03:29:51.357673 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5b6dbdb6f5-6h6vr" Dec 13 03:29:51 crc kubenswrapper[5070]: I1213 03:29:51.404779 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6bf59f66bf-pq5nh"] Dec 13 03:29:51 crc kubenswrapper[5070]: I1213 03:29:51.405047 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6bf59f66bf-pq5nh" podUID="d105fc92-98ef-4041-ae25-513512416174" containerName="dnsmasq-dns" containerID="cri-o://dfd382ab00b5edd9946e24e887acec2e97fe23ab12613c99250754b132c359f5" gracePeriod=10 Dec 13 03:29:51 crc kubenswrapper[5070]: I1213 03:29:51.943278 5070 patch_prober.go:28] interesting pod/machine-config-daemon-9l4rb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 13 03:29:51 crc kubenswrapper[5070]: I1213 03:29:51.943347 5070 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 13 03:29:51 crc kubenswrapper[5070]: I1213 03:29:51.943397 5070 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" Dec 13 03:29:51 crc kubenswrapper[5070]: I1213 03:29:51.944149 5070 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"23a1c6bfa0fe9bf90a6fdead1bc43aade8cb45302f3b76d55b4d7f69ae3c4750"} pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 13 03:29:51 crc kubenswrapper[5070]: I1213 03:29:51.944222 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" containerName="machine-config-daemon" containerID="cri-o://23a1c6bfa0fe9bf90a6fdead1bc43aade8cb45302f3b76d55b4d7f69ae3c4750" gracePeriod=600 Dec 13 03:29:54 crc kubenswrapper[5070]: I1213 03:29:54.783592 5070 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-6bf59f66bf-pq5nh" podUID="d105fc92-98ef-4041-ae25-513512416174" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.127:5353: connect: connection refused" Dec 13 03:29:56 crc kubenswrapper[5070]: I1213 03:29:56.042584 5070 generic.go:334] "Generic (PLEG): container finished" podID="d105fc92-98ef-4041-ae25-513512416174" containerID="dfd382ab00b5edd9946e24e887acec2e97fe23ab12613c99250754b132c359f5" exitCode=0 Dec 13 03:29:56 crc kubenswrapper[5070]: I1213 03:29:56.042690 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bf59f66bf-pq5nh" event={"ID":"d105fc92-98ef-4041-ae25-513512416174","Type":"ContainerDied","Data":"dfd382ab00b5edd9946e24e887acec2e97fe23ab12613c99250754b132c359f5"} Dec 13 03:29:56 crc kubenswrapper[5070]: I1213 03:29:56.713788 5070 util.go:48] "No ready sandbox for pod can 
be found. Need to start a new one" pod="openstack/keystone-bootstrap-4v7hg" Dec 13 03:29:57 crc kubenswrapper[5070]: I1213 03:29:56.817932 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/36830906-e994-4294-8cef-75a40f39ede4-credential-keys\") pod \"36830906-e994-4294-8cef-75a40f39ede4\" (UID: \"36830906-e994-4294-8cef-75a40f39ede4\") " Dec 13 03:29:57 crc kubenswrapper[5070]: I1213 03:29:56.818365 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36830906-e994-4294-8cef-75a40f39ede4-combined-ca-bundle\") pod \"36830906-e994-4294-8cef-75a40f39ede4\" (UID: \"36830906-e994-4294-8cef-75a40f39ede4\") " Dec 13 03:29:57 crc kubenswrapper[5070]: I1213 03:29:56.818422 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/36830906-e994-4294-8cef-75a40f39ede4-scripts\") pod \"36830906-e994-4294-8cef-75a40f39ede4\" (UID: \"36830906-e994-4294-8cef-75a40f39ede4\") " Dec 13 03:29:57 crc kubenswrapper[5070]: I1213 03:29:56.818605 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/36830906-e994-4294-8cef-75a40f39ede4-config-data\") pod \"36830906-e994-4294-8cef-75a40f39ede4\" (UID: \"36830906-e994-4294-8cef-75a40f39ede4\") " Dec 13 03:29:57 crc kubenswrapper[5070]: I1213 03:29:56.818644 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ffsvj\" (UniqueName: \"kubernetes.io/projected/36830906-e994-4294-8cef-75a40f39ede4-kube-api-access-ffsvj\") pod \"36830906-e994-4294-8cef-75a40f39ede4\" (UID: \"36830906-e994-4294-8cef-75a40f39ede4\") " Dec 13 03:29:57 crc kubenswrapper[5070]: I1213 03:29:56.818669 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/36830906-e994-4294-8cef-75a40f39ede4-fernet-keys\") pod \"36830906-e994-4294-8cef-75a40f39ede4\" (UID: \"36830906-e994-4294-8cef-75a40f39ede4\") " Dec 13 03:29:57 crc kubenswrapper[5070]: I1213 03:29:56.834140 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/36830906-e994-4294-8cef-75a40f39ede4-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "36830906-e994-4294-8cef-75a40f39ede4" (UID: "36830906-e994-4294-8cef-75a40f39ede4"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:29:57 crc kubenswrapper[5070]: I1213 03:29:56.850891 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/36830906-e994-4294-8cef-75a40f39ede4-kube-api-access-ffsvj" (OuterVolumeSpecName: "kube-api-access-ffsvj") pod "36830906-e994-4294-8cef-75a40f39ede4" (UID: "36830906-e994-4294-8cef-75a40f39ede4"). InnerVolumeSpecName "kube-api-access-ffsvj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:29:57 crc kubenswrapper[5070]: I1213 03:29:56.850968 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/36830906-e994-4294-8cef-75a40f39ede4-scripts" (OuterVolumeSpecName: "scripts") pod "36830906-e994-4294-8cef-75a40f39ede4" (UID: "36830906-e994-4294-8cef-75a40f39ede4"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:29:57 crc kubenswrapper[5070]: I1213 03:29:56.856804 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/36830906-e994-4294-8cef-75a40f39ede4-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "36830906-e994-4294-8cef-75a40f39ede4" (UID: "36830906-e994-4294-8cef-75a40f39ede4"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:29:57 crc kubenswrapper[5070]: I1213 03:29:56.893604 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/36830906-e994-4294-8cef-75a40f39ede4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "36830906-e994-4294-8cef-75a40f39ede4" (UID: "36830906-e994-4294-8cef-75a40f39ede4"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:29:57 crc kubenswrapper[5070]: I1213 03:29:56.894464 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/36830906-e994-4294-8cef-75a40f39ede4-config-data" (OuterVolumeSpecName: "config-data") pod "36830906-e994-4294-8cef-75a40f39ede4" (UID: "36830906-e994-4294-8cef-75a40f39ede4"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:29:57 crc kubenswrapper[5070]: I1213 03:29:56.920316 5070 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/36830906-e994-4294-8cef-75a40f39ede4-config-data\") on node \"crc\" DevicePath \"\"" Dec 13 03:29:57 crc kubenswrapper[5070]: I1213 03:29:56.920342 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ffsvj\" (UniqueName: \"kubernetes.io/projected/36830906-e994-4294-8cef-75a40f39ede4-kube-api-access-ffsvj\") on node \"crc\" DevicePath \"\"" Dec 13 03:29:57 crc kubenswrapper[5070]: I1213 03:29:56.920351 5070 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/36830906-e994-4294-8cef-75a40f39ede4-fernet-keys\") on node \"crc\" DevicePath \"\"" Dec 13 03:29:57 crc kubenswrapper[5070]: I1213 03:29:56.920360 5070 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/36830906-e994-4294-8cef-75a40f39ede4-credential-keys\") on node \"crc\" DevicePath \"\"" Dec 13 03:29:57 crc kubenswrapper[5070]: I1213 03:29:56.920369 5070 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36830906-e994-4294-8cef-75a40f39ede4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 03:29:57 crc kubenswrapper[5070]: I1213 03:29:56.920377 5070 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/36830906-e994-4294-8cef-75a40f39ede4-scripts\") on node \"crc\" DevicePath \"\"" Dec 13 03:29:57 crc kubenswrapper[5070]: I1213 03:29:57.053103 5070 generic.go:334] "Generic (PLEG): container finished" podID="a2e447c7-5901-414f-af96-69441d4750db" containerID="23a1c6bfa0fe9bf90a6fdead1bc43aade8cb45302f3b76d55b4d7f69ae3c4750" exitCode=0 Dec 13 03:29:57 crc kubenswrapper[5070]: I1213 03:29:57.053177 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" event={"ID":"a2e447c7-5901-414f-af96-69441d4750db","Type":"ContainerDied","Data":"23a1c6bfa0fe9bf90a6fdead1bc43aade8cb45302f3b76d55b4d7f69ae3c4750"} Dec 
13 03:29:57 crc kubenswrapper[5070]: I1213 03:29:57.053609 5070 scope.go:117] "RemoveContainer" containerID="440f1b13ac82692ab2897557fa86ac3ce3eba37cec807bd53246344ef4b3c0b6" Dec 13 03:29:57 crc kubenswrapper[5070]: I1213 03:29:57.055013 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-4v7hg" event={"ID":"36830906-e994-4294-8cef-75a40f39ede4","Type":"ContainerDied","Data":"23fcc08eef08a1ab3d5e09ce048b9a12c9eeef9c5c241311f16ef59826e38e9f"} Dec 13 03:29:57 crc kubenswrapper[5070]: I1213 03:29:57.055032 5070 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="23fcc08eef08a1ab3d5e09ce048b9a12c9eeef9c5c241311f16ef59826e38e9f" Dec 13 03:29:57 crc kubenswrapper[5070]: I1213 03:29:57.055091 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-4v7hg" Dec 13 03:29:57 crc kubenswrapper[5070]: E1213 03:29:57.157835 5070 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/sg-core:latest" Dec 13 03:29:57 crc kubenswrapper[5070]: E1213 03:29:57.158093 5070 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:sg-core,Image:quay.io/openstack-k8s-operators/sg-core:latest,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:sg-core-conf-yaml,ReadOnly:false,MountPath:/etc/sg-core.conf.yaml,SubPath:sg-core.conf.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-729cc,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ceilometer-0_openstack(62471d23-7b26-439a-9a26-e65abe4be2c1): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 13 03:29:57 crc kubenswrapper[5070]: I1213 03:29:57.889646 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-58cd5c7d4f-c98c8"] Dec 13 03:29:57 crc kubenswrapper[5070]: E1213 03:29:57.890944 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="36830906-e994-4294-8cef-75a40f39ede4" containerName="keystone-bootstrap" Dec 13 03:29:57 crc kubenswrapper[5070]: I1213 03:29:57.890974 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="36830906-e994-4294-8cef-75a40f39ede4" containerName="keystone-bootstrap" Dec 13 03:29:57 crc kubenswrapper[5070]: I1213 03:29:57.891231 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="36830906-e994-4294-8cef-75a40f39ede4" containerName="keystone-bootstrap" Dec 13 03:29:57 crc kubenswrapper[5070]: I1213 03:29:57.892010 5070 util.go:30] "No sandbox for pod can 
be found. Need to start a new one" pod="openstack/keystone-58cd5c7d4f-c98c8" Dec 13 03:29:57 crc kubenswrapper[5070]: I1213 03:29:57.897127 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 13 03:29:57 crc kubenswrapper[5070]: I1213 03:29:57.897574 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 13 03:29:57 crc kubenswrapper[5070]: I1213 03:29:57.898913 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 13 03:29:57 crc kubenswrapper[5070]: I1213 03:29:57.902319 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-58cd5c7d4f-c98c8"] Dec 13 03:29:57 crc kubenswrapper[5070]: I1213 03:29:57.903881 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-internal-svc" Dec 13 03:29:57 crc kubenswrapper[5070]: I1213 03:29:57.908909 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-public-svc" Dec 13 03:29:57 crc kubenswrapper[5070]: I1213 03:29:57.910290 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-g4vc4" Dec 13 03:29:58 crc kubenswrapper[5070]: I1213 03:29:58.040810 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/5e66755e-b36f-4931-bc87-3fcecfc5c1b4-fernet-keys\") pod \"keystone-58cd5c7d4f-c98c8\" (UID: \"5e66755e-b36f-4931-bc87-3fcecfc5c1b4\") " pod="openstack/keystone-58cd5c7d4f-c98c8" Dec 13 03:29:58 crc kubenswrapper[5070]: I1213 03:29:58.040895 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5e66755e-b36f-4931-bc87-3fcecfc5c1b4-public-tls-certs\") pod \"keystone-58cd5c7d4f-c98c8\" (UID: \"5e66755e-b36f-4931-bc87-3fcecfc5c1b4\") " pod="openstack/keystone-58cd5c7d4f-c98c8" Dec 13 03:29:58 crc kubenswrapper[5070]: I1213 03:29:58.040950 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ctdjb\" (UniqueName: \"kubernetes.io/projected/5e66755e-b36f-4931-bc87-3fcecfc5c1b4-kube-api-access-ctdjb\") pod \"keystone-58cd5c7d4f-c98c8\" (UID: \"5e66755e-b36f-4931-bc87-3fcecfc5c1b4\") " pod="openstack/keystone-58cd5c7d4f-c98c8" Dec 13 03:29:58 crc kubenswrapper[5070]: I1213 03:29:58.041018 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5e66755e-b36f-4931-bc87-3fcecfc5c1b4-internal-tls-certs\") pod \"keystone-58cd5c7d4f-c98c8\" (UID: \"5e66755e-b36f-4931-bc87-3fcecfc5c1b4\") " pod="openstack/keystone-58cd5c7d4f-c98c8" Dec 13 03:29:58 crc kubenswrapper[5070]: I1213 03:29:58.041050 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5e66755e-b36f-4931-bc87-3fcecfc5c1b4-config-data\") pod \"keystone-58cd5c7d4f-c98c8\" (UID: \"5e66755e-b36f-4931-bc87-3fcecfc5c1b4\") " pod="openstack/keystone-58cd5c7d4f-c98c8" Dec 13 03:29:58 crc kubenswrapper[5070]: I1213 03:29:58.041075 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/5e66755e-b36f-4931-bc87-3fcecfc5c1b4-credential-keys\") pod 
\"keystone-58cd5c7d4f-c98c8\" (UID: \"5e66755e-b36f-4931-bc87-3fcecfc5c1b4\") " pod="openstack/keystone-58cd5c7d4f-c98c8" Dec 13 03:29:58 crc kubenswrapper[5070]: I1213 03:29:58.041128 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5e66755e-b36f-4931-bc87-3fcecfc5c1b4-scripts\") pod \"keystone-58cd5c7d4f-c98c8\" (UID: \"5e66755e-b36f-4931-bc87-3fcecfc5c1b4\") " pod="openstack/keystone-58cd5c7d4f-c98c8" Dec 13 03:29:58 crc kubenswrapper[5070]: I1213 03:29:58.041213 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e66755e-b36f-4931-bc87-3fcecfc5c1b4-combined-ca-bundle\") pod \"keystone-58cd5c7d4f-c98c8\" (UID: \"5e66755e-b36f-4931-bc87-3fcecfc5c1b4\") " pod="openstack/keystone-58cd5c7d4f-c98c8" Dec 13 03:29:58 crc kubenswrapper[5070]: I1213 03:29:58.066267 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-6ff9d66794-qds4v" event={"ID":"9b374c77-6272-4397-b17e-c0bc4b8e3803","Type":"ContainerStarted","Data":"e0375f1ee56d2599f5475b5988e549002503c4ff6032210142cb7443cc5cafe7"} Dec 13 03:29:58 crc kubenswrapper[5070]: I1213 03:29:58.066526 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-6ff9d66794-qds4v" Dec 13 03:29:58 crc kubenswrapper[5070]: I1213 03:29:58.066566 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-6ff9d66794-qds4v" Dec 13 03:29:58 crc kubenswrapper[5070]: I1213 03:29:58.071089 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" event={"ID":"a2e447c7-5901-414f-af96-69441d4750db","Type":"ContainerStarted","Data":"0e606491cad6e3bd7c4c4b366efe48c2688d128966fcc1905da9fc58a2f148b4"} Dec 13 03:29:58 crc kubenswrapper[5070]: I1213 03:29:58.095532 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-6ff9d66794-qds4v" podStartSLOduration=14.095509267 podStartE2EDuration="14.095509267s" podCreationTimestamp="2025-12-13 03:29:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 03:29:58.088653541 +0000 UTC m=+1090.324497107" watchObservedRunningTime="2025-12-13 03:29:58.095509267 +0000 UTC m=+1090.331352813" Dec 13 03:29:58 crc kubenswrapper[5070]: I1213 03:29:58.142682 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/5e66755e-b36f-4931-bc87-3fcecfc5c1b4-fernet-keys\") pod \"keystone-58cd5c7d4f-c98c8\" (UID: \"5e66755e-b36f-4931-bc87-3fcecfc5c1b4\") " pod="openstack/keystone-58cd5c7d4f-c98c8" Dec 13 03:29:58 crc kubenswrapper[5070]: I1213 03:29:58.142749 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5e66755e-b36f-4931-bc87-3fcecfc5c1b4-public-tls-certs\") pod \"keystone-58cd5c7d4f-c98c8\" (UID: \"5e66755e-b36f-4931-bc87-3fcecfc5c1b4\") " pod="openstack/keystone-58cd5c7d4f-c98c8" Dec 13 03:29:58 crc kubenswrapper[5070]: I1213 03:29:58.142797 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ctdjb\" (UniqueName: \"kubernetes.io/projected/5e66755e-b36f-4931-bc87-3fcecfc5c1b4-kube-api-access-ctdjb\") pod \"keystone-58cd5c7d4f-c98c8\" (UID: 
\"5e66755e-b36f-4931-bc87-3fcecfc5c1b4\") " pod="openstack/keystone-58cd5c7d4f-c98c8" Dec 13 03:29:58 crc kubenswrapper[5070]: I1213 03:29:58.142847 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5e66755e-b36f-4931-bc87-3fcecfc5c1b4-internal-tls-certs\") pod \"keystone-58cd5c7d4f-c98c8\" (UID: \"5e66755e-b36f-4931-bc87-3fcecfc5c1b4\") " pod="openstack/keystone-58cd5c7d4f-c98c8" Dec 13 03:29:58 crc kubenswrapper[5070]: I1213 03:29:58.142881 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5e66755e-b36f-4931-bc87-3fcecfc5c1b4-config-data\") pod \"keystone-58cd5c7d4f-c98c8\" (UID: \"5e66755e-b36f-4931-bc87-3fcecfc5c1b4\") " pod="openstack/keystone-58cd5c7d4f-c98c8" Dec 13 03:29:58 crc kubenswrapper[5070]: I1213 03:29:58.142911 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/5e66755e-b36f-4931-bc87-3fcecfc5c1b4-credential-keys\") pod \"keystone-58cd5c7d4f-c98c8\" (UID: \"5e66755e-b36f-4931-bc87-3fcecfc5c1b4\") " pod="openstack/keystone-58cd5c7d4f-c98c8" Dec 13 03:29:58 crc kubenswrapper[5070]: I1213 03:29:58.142948 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5e66755e-b36f-4931-bc87-3fcecfc5c1b4-scripts\") pod \"keystone-58cd5c7d4f-c98c8\" (UID: \"5e66755e-b36f-4931-bc87-3fcecfc5c1b4\") " pod="openstack/keystone-58cd5c7d4f-c98c8" Dec 13 03:29:58 crc kubenswrapper[5070]: I1213 03:29:58.143003 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e66755e-b36f-4931-bc87-3fcecfc5c1b4-combined-ca-bundle\") pod \"keystone-58cd5c7d4f-c98c8\" (UID: \"5e66755e-b36f-4931-bc87-3fcecfc5c1b4\") " pod="openstack/keystone-58cd5c7d4f-c98c8" Dec 13 03:29:58 crc kubenswrapper[5070]: I1213 03:29:58.149852 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5e66755e-b36f-4931-bc87-3fcecfc5c1b4-config-data\") pod \"keystone-58cd5c7d4f-c98c8\" (UID: \"5e66755e-b36f-4931-bc87-3fcecfc5c1b4\") " pod="openstack/keystone-58cd5c7d4f-c98c8" Dec 13 03:29:58 crc kubenswrapper[5070]: I1213 03:29:58.150268 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e66755e-b36f-4931-bc87-3fcecfc5c1b4-combined-ca-bundle\") pod \"keystone-58cd5c7d4f-c98c8\" (UID: \"5e66755e-b36f-4931-bc87-3fcecfc5c1b4\") " pod="openstack/keystone-58cd5c7d4f-c98c8" Dec 13 03:29:58 crc kubenswrapper[5070]: I1213 03:29:58.151129 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/5e66755e-b36f-4931-bc87-3fcecfc5c1b4-fernet-keys\") pod \"keystone-58cd5c7d4f-c98c8\" (UID: \"5e66755e-b36f-4931-bc87-3fcecfc5c1b4\") " pod="openstack/keystone-58cd5c7d4f-c98c8" Dec 13 03:29:58 crc kubenswrapper[5070]: I1213 03:29:58.152179 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5e66755e-b36f-4931-bc87-3fcecfc5c1b4-scripts\") pod \"keystone-58cd5c7d4f-c98c8\" (UID: \"5e66755e-b36f-4931-bc87-3fcecfc5c1b4\") " pod="openstack/keystone-58cd5c7d4f-c98c8" Dec 13 03:29:58 crc kubenswrapper[5070]: I1213 03:29:58.154693 5070 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5e66755e-b36f-4931-bc87-3fcecfc5c1b4-public-tls-certs\") pod \"keystone-58cd5c7d4f-c98c8\" (UID: \"5e66755e-b36f-4931-bc87-3fcecfc5c1b4\") " pod="openstack/keystone-58cd5c7d4f-c98c8" Dec 13 03:29:58 crc kubenswrapper[5070]: I1213 03:29:58.157133 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/5e66755e-b36f-4931-bc87-3fcecfc5c1b4-credential-keys\") pod \"keystone-58cd5c7d4f-c98c8\" (UID: \"5e66755e-b36f-4931-bc87-3fcecfc5c1b4\") " pod="openstack/keystone-58cd5c7d4f-c98c8" Dec 13 03:29:58 crc kubenswrapper[5070]: I1213 03:29:58.161042 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5e66755e-b36f-4931-bc87-3fcecfc5c1b4-internal-tls-certs\") pod \"keystone-58cd5c7d4f-c98c8\" (UID: \"5e66755e-b36f-4931-bc87-3fcecfc5c1b4\") " pod="openstack/keystone-58cd5c7d4f-c98c8" Dec 13 03:29:58 crc kubenswrapper[5070]: I1213 03:29:58.163010 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ctdjb\" (UniqueName: \"kubernetes.io/projected/5e66755e-b36f-4931-bc87-3fcecfc5c1b4-kube-api-access-ctdjb\") pod \"keystone-58cd5c7d4f-c98c8\" (UID: \"5e66755e-b36f-4931-bc87-3fcecfc5c1b4\") " pod="openstack/keystone-58cd5c7d4f-c98c8" Dec 13 03:29:58 crc kubenswrapper[5070]: I1213 03:29:58.231283 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-58cd5c7d4f-c98c8" Dec 13 03:29:58 crc kubenswrapper[5070]: I1213 03:29:58.232305 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6bf59f66bf-pq5nh" Dec 13 03:29:58 crc kubenswrapper[5070]: I1213 03:29:58.249210 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d105fc92-98ef-4041-ae25-513512416174-config\") pod \"d105fc92-98ef-4041-ae25-513512416174\" (UID: \"d105fc92-98ef-4041-ae25-513512416174\") " Dec 13 03:29:58 crc kubenswrapper[5070]: I1213 03:29:58.249279 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d105fc92-98ef-4041-ae25-513512416174-dns-svc\") pod \"d105fc92-98ef-4041-ae25-513512416174\" (UID: \"d105fc92-98ef-4041-ae25-513512416174\") " Dec 13 03:29:58 crc kubenswrapper[5070]: I1213 03:29:58.265025 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-n7ll5"] Dec 13 03:29:58 crc kubenswrapper[5070]: W1213 03:29:58.273016 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod64c718c6_de73_4e08_9506_dce5dc9ebffd.slice/crio-b986ef3e2e6151ac907c9532f9ac0bfb11c120c8b28571b6a226d508c9764a08 WatchSource:0}: Error finding container b986ef3e2e6151ac907c9532f9ac0bfb11c120c8b28571b6a226d508c9764a08: Status 404 returned error can't find the container with id b986ef3e2e6151ac907c9532f9ac0bfb11c120c8b28571b6a226d508c9764a08 Dec 13 03:29:58 crc kubenswrapper[5070]: W1213 03:29:58.282592 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode3c77f64_0733_4569_b3d9_f112d9e1d02b.slice/crio-e4fb2b9f2a1bd73488cf02220ff310808a13ff0011195c0fd8b5356f8b70af98 WatchSource:0}: Error finding container 
e4fb2b9f2a1bd73488cf02220ff310808a13ff0011195c0fd8b5356f8b70af98: Status 404 returned error can't find the container with id e4fb2b9f2a1bd73488cf02220ff310808a13ff0011195c0fd8b5356f8b70af98 Dec 13 03:29:58 crc kubenswrapper[5070]: I1213 03:29:58.289764 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-vtvhk"] Dec 13 03:29:58 crc kubenswrapper[5070]: I1213 03:29:58.299546 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-csqcx"] Dec 13 03:29:58 crc kubenswrapper[5070]: W1213 03:29:58.312090 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod759f7e37_a2f6_4a1b_a220_24397c94b928.slice/crio-504872aa8301581903db50171caf16ba6f52b377d961bc582510e3869bf4f846 WatchSource:0}: Error finding container 504872aa8301581903db50171caf16ba6f52b377d961bc582510e3869bf4f846: Status 404 returned error can't find the container with id 504872aa8301581903db50171caf16ba6f52b377d961bc582510e3869bf4f846 Dec 13 03:29:58 crc kubenswrapper[5070]: I1213 03:29:58.344671 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d105fc92-98ef-4041-ae25-513512416174-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "d105fc92-98ef-4041-ae25-513512416174" (UID: "d105fc92-98ef-4041-ae25-513512416174"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:29:58 crc kubenswrapper[5070]: I1213 03:29:58.351395 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d105fc92-98ef-4041-ae25-513512416174-ovsdbserver-nb\") pod \"d105fc92-98ef-4041-ae25-513512416174\" (UID: \"d105fc92-98ef-4041-ae25-513512416174\") " Dec 13 03:29:58 crc kubenswrapper[5070]: I1213 03:29:58.351526 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d105fc92-98ef-4041-ae25-513512416174-ovsdbserver-sb\") pod \"d105fc92-98ef-4041-ae25-513512416174\" (UID: \"d105fc92-98ef-4041-ae25-513512416174\") " Dec 13 03:29:58 crc kubenswrapper[5070]: I1213 03:29:58.352096 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnpq7\" (UniqueName: \"kubernetes.io/projected/d105fc92-98ef-4041-ae25-513512416174-kube-api-access-rnpq7\") pod \"d105fc92-98ef-4041-ae25-513512416174\" (UID: \"d105fc92-98ef-4041-ae25-513512416174\") " Dec 13 03:29:58 crc kubenswrapper[5070]: I1213 03:29:58.352687 5070 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d105fc92-98ef-4041-ae25-513512416174-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 13 03:29:58 crc kubenswrapper[5070]: I1213 03:29:58.356153 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d105fc92-98ef-4041-ae25-513512416174-kube-api-access-rnpq7" (OuterVolumeSpecName: "kube-api-access-rnpq7") pod "d105fc92-98ef-4041-ae25-513512416174" (UID: "d105fc92-98ef-4041-ae25-513512416174"). InnerVolumeSpecName "kube-api-access-rnpq7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:29:58 crc kubenswrapper[5070]: I1213 03:29:58.357878 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d105fc92-98ef-4041-ae25-513512416174-config" (OuterVolumeSpecName: "config") pod "d105fc92-98ef-4041-ae25-513512416174" (UID: "d105fc92-98ef-4041-ae25-513512416174"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:29:58 crc kubenswrapper[5070]: I1213 03:29:58.394262 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d105fc92-98ef-4041-ae25-513512416174-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "d105fc92-98ef-4041-ae25-513512416174" (UID: "d105fc92-98ef-4041-ae25-513512416174"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:29:58 crc kubenswrapper[5070]: I1213 03:29:58.419085 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d105fc92-98ef-4041-ae25-513512416174-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "d105fc92-98ef-4041-ae25-513512416174" (UID: "d105fc92-98ef-4041-ae25-513512416174"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:29:58 crc kubenswrapper[5070]: I1213 03:29:58.454772 5070 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d105fc92-98ef-4041-ae25-513512416174-config\") on node \"crc\" DevicePath \"\"" Dec 13 03:29:58 crc kubenswrapper[5070]: I1213 03:29:58.455201 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnpq7\" (UniqueName: \"kubernetes.io/projected/d105fc92-98ef-4041-ae25-513512416174-kube-api-access-rnpq7\") on node \"crc\" DevicePath \"\"" Dec 13 03:29:58 crc kubenswrapper[5070]: I1213 03:29:58.455213 5070 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d105fc92-98ef-4041-ae25-513512416174-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 13 03:29:58 crc kubenswrapper[5070]: I1213 03:29:58.455223 5070 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d105fc92-98ef-4041-ae25-513512416174-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 13 03:29:58 crc kubenswrapper[5070]: I1213 03:29:58.771921 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-58cd5c7d4f-c98c8"] Dec 13 03:29:59 crc kubenswrapper[5070]: I1213 03:29:59.081062 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-58cd5c7d4f-c98c8" event={"ID":"5e66755e-b36f-4931-bc87-3fcecfc5c1b4","Type":"ContainerStarted","Data":"48743470aa5e295f7a9a6317d37db7f5567a52c5272029e8bc261e5e0394164c"} Dec 13 03:29:59 crc kubenswrapper[5070]: I1213 03:29:59.081308 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-58cd5c7d4f-c98c8" event={"ID":"5e66755e-b36f-4931-bc87-3fcecfc5c1b4","Type":"ContainerStarted","Data":"29c4e24cbfe16bfb8336c00c21d698cdd7af0e7fb65d56e6728aebe11bc46691"} Dec 13 03:29:59 crc kubenswrapper[5070]: I1213 03:29:59.081328 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/keystone-58cd5c7d4f-c98c8" Dec 13 03:29:59 crc kubenswrapper[5070]: I1213 03:29:59.085466 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-n7ll5" 
event={"ID":"64c718c6-de73-4e08-9506-dce5dc9ebffd","Type":"ContainerStarted","Data":"b986ef3e2e6151ac907c9532f9ac0bfb11c120c8b28571b6a226d508c9764a08"} Dec 13 03:29:59 crc kubenswrapper[5070]: I1213 03:29:59.088065 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-csqcx" event={"ID":"759f7e37-a2f6-4a1b-a220-24397c94b928","Type":"ContainerStarted","Data":"504872aa8301581903db50171caf16ba6f52b377d961bc582510e3869bf4f846"} Dec 13 03:29:59 crc kubenswrapper[5070]: I1213 03:29:59.089874 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bf59f66bf-pq5nh" event={"ID":"d105fc92-98ef-4041-ae25-513512416174","Type":"ContainerDied","Data":"4160930aaccdfc1497be7a036128b6444434ca669bd7c8749524bcb482d05281"} Dec 13 03:29:59 crc kubenswrapper[5070]: I1213 03:29:59.089889 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6bf59f66bf-pq5nh" Dec 13 03:29:59 crc kubenswrapper[5070]: I1213 03:29:59.089908 5070 scope.go:117] "RemoveContainer" containerID="dfd382ab00b5edd9946e24e887acec2e97fe23ab12613c99250754b132c359f5" Dec 13 03:29:59 crc kubenswrapper[5070]: I1213 03:29:59.092019 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-vtvhk" event={"ID":"e3c77f64-0733-4569-b3d9-f112d9e1d02b","Type":"ContainerStarted","Data":"4f20ee785171c7c656f65f747eb262791a9a0023f2a95864f119504c654dd5f8"} Dec 13 03:29:59 crc kubenswrapper[5070]: I1213 03:29:59.092056 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-vtvhk" event={"ID":"e3c77f64-0733-4569-b3d9-f112d9e1d02b","Type":"ContainerStarted","Data":"e4fb2b9f2a1bd73488cf02220ff310808a13ff0011195c0fd8b5356f8b70af98"} Dec 13 03:29:59 crc kubenswrapper[5070]: I1213 03:29:59.140102 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-58cd5c7d4f-c98c8" podStartSLOduration=2.140086644 podStartE2EDuration="2.140086644s" podCreationTimestamp="2025-12-13 03:29:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 03:29:59.108975879 +0000 UTC m=+1091.344819425" watchObservedRunningTime="2025-12-13 03:29:59.140086644 +0000 UTC m=+1091.375930190" Dec 13 03:29:59 crc kubenswrapper[5070]: I1213 03:29:59.148648 5070 scope.go:117] "RemoveContainer" containerID="5ff07b4e848d86dcb8ccc8b855198be72e7092e5ea586e68a5bf5d06a7a27140" Dec 13 03:29:59 crc kubenswrapper[5070]: I1213 03:29:59.160497 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-sync-vtvhk" podStartSLOduration=12.160477668 podStartE2EDuration="12.160477668s" podCreationTimestamp="2025-12-13 03:29:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 03:29:59.144645039 +0000 UTC m=+1091.380488575" watchObservedRunningTime="2025-12-13 03:29:59.160477668 +0000 UTC m=+1091.396321214" Dec 13 03:29:59 crc kubenswrapper[5070]: I1213 03:29:59.163451 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6bf59f66bf-pq5nh"] Dec 13 03:29:59 crc kubenswrapper[5070]: I1213 03:29:59.184229 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6bf59f66bf-pq5nh"] Dec 13 03:30:00 crc kubenswrapper[5070]: I1213 03:30:00.130759 5070 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-operator-lifecycle-manager/collect-profiles-29426610-wvnmw"] Dec 13 03:30:00 crc kubenswrapper[5070]: E1213 03:30:00.131633 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d105fc92-98ef-4041-ae25-513512416174" containerName="init" Dec 13 03:30:00 crc kubenswrapper[5070]: I1213 03:30:00.131659 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="d105fc92-98ef-4041-ae25-513512416174" containerName="init" Dec 13 03:30:00 crc kubenswrapper[5070]: E1213 03:30:00.131705 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d105fc92-98ef-4041-ae25-513512416174" containerName="dnsmasq-dns" Dec 13 03:30:00 crc kubenswrapper[5070]: I1213 03:30:00.131713 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="d105fc92-98ef-4041-ae25-513512416174" containerName="dnsmasq-dns" Dec 13 03:30:00 crc kubenswrapper[5070]: I1213 03:30:00.131934 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="d105fc92-98ef-4041-ae25-513512416174" containerName="dnsmasq-dns" Dec 13 03:30:00 crc kubenswrapper[5070]: I1213 03:30:00.132665 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29426610-wvnmw" Dec 13 03:30:00 crc kubenswrapper[5070]: I1213 03:30:00.135785 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 13 03:30:00 crc kubenswrapper[5070]: I1213 03:30:00.135809 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 13 03:30:00 crc kubenswrapper[5070]: I1213 03:30:00.139715 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29426610-wvnmw"] Dec 13 03:30:00 crc kubenswrapper[5070]: I1213 03:30:00.141418 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-flllr\" (UniqueName: \"kubernetes.io/projected/7e2f5c73-7275-4235-8577-09c4c10cdfb4-kube-api-access-flllr\") pod \"collect-profiles-29426610-wvnmw\" (UID: \"7e2f5c73-7275-4235-8577-09c4c10cdfb4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426610-wvnmw" Dec 13 03:30:00 crc kubenswrapper[5070]: I1213 03:30:00.141746 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7e2f5c73-7275-4235-8577-09c4c10cdfb4-config-volume\") pod \"collect-profiles-29426610-wvnmw\" (UID: \"7e2f5c73-7275-4235-8577-09c4c10cdfb4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426610-wvnmw" Dec 13 03:30:00 crc kubenswrapper[5070]: I1213 03:30:00.141997 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7e2f5c73-7275-4235-8577-09c4c10cdfb4-secret-volume\") pod \"collect-profiles-29426610-wvnmw\" (UID: \"7e2f5c73-7275-4235-8577-09c4c10cdfb4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426610-wvnmw" Dec 13 03:30:00 crc kubenswrapper[5070]: I1213 03:30:00.179597 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d105fc92-98ef-4041-ae25-513512416174" path="/var/lib/kubelet/pods/d105fc92-98ef-4041-ae25-513512416174/volumes" Dec 13 03:30:00 crc kubenswrapper[5070]: I1213 03:30:00.243382 5070 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7e2f5c73-7275-4235-8577-09c4c10cdfb4-config-volume\") pod \"collect-profiles-29426610-wvnmw\" (UID: \"7e2f5c73-7275-4235-8577-09c4c10cdfb4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426610-wvnmw" Dec 13 03:30:00 crc kubenswrapper[5070]: I1213 03:30:00.243487 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7e2f5c73-7275-4235-8577-09c4c10cdfb4-secret-volume\") pod \"collect-profiles-29426610-wvnmw\" (UID: \"7e2f5c73-7275-4235-8577-09c4c10cdfb4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426610-wvnmw" Dec 13 03:30:00 crc kubenswrapper[5070]: I1213 03:30:00.243556 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-flllr\" (UniqueName: \"kubernetes.io/projected/7e2f5c73-7275-4235-8577-09c4c10cdfb4-kube-api-access-flllr\") pod \"collect-profiles-29426610-wvnmw\" (UID: \"7e2f5c73-7275-4235-8577-09c4c10cdfb4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426610-wvnmw" Dec 13 03:30:00 crc kubenswrapper[5070]: I1213 03:30:00.244751 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7e2f5c73-7275-4235-8577-09c4c10cdfb4-config-volume\") pod \"collect-profiles-29426610-wvnmw\" (UID: \"7e2f5c73-7275-4235-8577-09c4c10cdfb4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426610-wvnmw" Dec 13 03:30:00 crc kubenswrapper[5070]: I1213 03:30:00.256191 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7e2f5c73-7275-4235-8577-09c4c10cdfb4-secret-volume\") pod \"collect-profiles-29426610-wvnmw\" (UID: \"7e2f5c73-7275-4235-8577-09c4c10cdfb4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426610-wvnmw" Dec 13 03:30:00 crc kubenswrapper[5070]: I1213 03:30:00.265999 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-flllr\" (UniqueName: \"kubernetes.io/projected/7e2f5c73-7275-4235-8577-09c4c10cdfb4-kube-api-access-flllr\") pod \"collect-profiles-29426610-wvnmw\" (UID: \"7e2f5c73-7275-4235-8577-09c4c10cdfb4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426610-wvnmw" Dec 13 03:30:00 crc kubenswrapper[5070]: I1213 03:30:00.457187 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29426610-wvnmw" Dec 13 03:30:00 crc kubenswrapper[5070]: I1213 03:30:00.927838 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29426610-wvnmw"] Dec 13 03:30:01 crc kubenswrapper[5070]: W1213 03:30:01.141547 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7e2f5c73_7275_4235_8577_09c4c10cdfb4.slice/crio-ef4fcb6bea96ce2f13b0baeb17af9560d36010848c1a2f2cf95889ecfd9c795d WatchSource:0}: Error finding container ef4fcb6bea96ce2f13b0baeb17af9560d36010848c1a2f2cf95889ecfd9c795d: Status 404 returned error can't find the container with id ef4fcb6bea96ce2f13b0baeb17af9560d36010848c1a2f2cf95889ecfd9c795d Dec 13 03:30:01 crc kubenswrapper[5070]: I1213 03:30:01.644493 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-6ff9d66794-qds4v" Dec 13 03:30:02 crc kubenswrapper[5070]: I1213 03:30:02.121465 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29426610-wvnmw" event={"ID":"7e2f5c73-7275-4235-8577-09c4c10cdfb4","Type":"ContainerStarted","Data":"ded122847230ffaa8f7c1dabc241665ebac8a6034eeac1cc2d992e75dcd74de8"} Dec 13 03:30:02 crc kubenswrapper[5070]: I1213 03:30:02.121774 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29426610-wvnmw" event={"ID":"7e2f5c73-7275-4235-8577-09c4c10cdfb4","Type":"ContainerStarted","Data":"ef4fcb6bea96ce2f13b0baeb17af9560d36010848c1a2f2cf95889ecfd9c795d"} Dec 13 03:30:02 crc kubenswrapper[5070]: I1213 03:30:02.137239 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29426610-wvnmw" podStartSLOduration=2.137218625 podStartE2EDuration="2.137218625s" podCreationTimestamp="2025-12-13 03:30:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 03:30:02.136707651 +0000 UTC m=+1094.372551197" watchObservedRunningTime="2025-12-13 03:30:02.137218625 +0000 UTC m=+1094.373062171" Dec 13 03:30:03 crc kubenswrapper[5070]: I1213 03:30:03.135321 5070 generic.go:334] "Generic (PLEG): container finished" podID="7e2f5c73-7275-4235-8577-09c4c10cdfb4" containerID="ded122847230ffaa8f7c1dabc241665ebac8a6034eeac1cc2d992e75dcd74de8" exitCode=0 Dec 13 03:30:03 crc kubenswrapper[5070]: I1213 03:30:03.135368 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29426610-wvnmw" event={"ID":"7e2f5c73-7275-4235-8577-09c4c10cdfb4","Type":"ContainerDied","Data":"ded122847230ffaa8f7c1dabc241665ebac8a6034eeac1cc2d992e75dcd74de8"} Dec 13 03:30:07 crc kubenswrapper[5070]: I1213 03:30:07.384358 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29426610-wvnmw" Dec 13 03:30:07 crc kubenswrapper[5070]: I1213 03:30:07.496481 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-flllr\" (UniqueName: \"kubernetes.io/projected/7e2f5c73-7275-4235-8577-09c4c10cdfb4-kube-api-access-flllr\") pod \"7e2f5c73-7275-4235-8577-09c4c10cdfb4\" (UID: \"7e2f5c73-7275-4235-8577-09c4c10cdfb4\") " Dec 13 03:30:07 crc kubenswrapper[5070]: I1213 03:30:07.496580 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7e2f5c73-7275-4235-8577-09c4c10cdfb4-config-volume\") pod \"7e2f5c73-7275-4235-8577-09c4c10cdfb4\" (UID: \"7e2f5c73-7275-4235-8577-09c4c10cdfb4\") " Dec 13 03:30:07 crc kubenswrapper[5070]: I1213 03:30:07.496652 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7e2f5c73-7275-4235-8577-09c4c10cdfb4-secret-volume\") pod \"7e2f5c73-7275-4235-8577-09c4c10cdfb4\" (UID: \"7e2f5c73-7275-4235-8577-09c4c10cdfb4\") " Dec 13 03:30:07 crc kubenswrapper[5070]: I1213 03:30:07.497585 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7e2f5c73-7275-4235-8577-09c4c10cdfb4-config-volume" (OuterVolumeSpecName: "config-volume") pod "7e2f5c73-7275-4235-8577-09c4c10cdfb4" (UID: "7e2f5c73-7275-4235-8577-09c4c10cdfb4"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:30:07 crc kubenswrapper[5070]: I1213 03:30:07.503067 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7e2f5c73-7275-4235-8577-09c4c10cdfb4-kube-api-access-flllr" (OuterVolumeSpecName: "kube-api-access-flllr") pod "7e2f5c73-7275-4235-8577-09c4c10cdfb4" (UID: "7e2f5c73-7275-4235-8577-09c4c10cdfb4"). InnerVolumeSpecName "kube-api-access-flllr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:30:07 crc kubenswrapper[5070]: I1213 03:30:07.515743 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7e2f5c73-7275-4235-8577-09c4c10cdfb4-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "7e2f5c73-7275-4235-8577-09c4c10cdfb4" (UID: "7e2f5c73-7275-4235-8577-09c4c10cdfb4"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:30:07 crc kubenswrapper[5070]: I1213 03:30:07.598461 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-flllr\" (UniqueName: \"kubernetes.io/projected/7e2f5c73-7275-4235-8577-09c4c10cdfb4-kube-api-access-flllr\") on node \"crc\" DevicePath \"\"" Dec 13 03:30:07 crc kubenswrapper[5070]: I1213 03:30:07.598495 5070 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7e2f5c73-7275-4235-8577-09c4c10cdfb4-config-volume\") on node \"crc\" DevicePath \"\"" Dec 13 03:30:07 crc kubenswrapper[5070]: I1213 03:30:07.598505 5070 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7e2f5c73-7275-4235-8577-09c4c10cdfb4-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 13 03:30:08 crc kubenswrapper[5070]: I1213 03:30:08.179076 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29426610-wvnmw" Dec 13 03:30:08 crc kubenswrapper[5070]: I1213 03:30:08.179855 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29426610-wvnmw" event={"ID":"7e2f5c73-7275-4235-8577-09c4c10cdfb4","Type":"ContainerDied","Data":"ef4fcb6bea96ce2f13b0baeb17af9560d36010848c1a2f2cf95889ecfd9c795d"} Dec 13 03:30:08 crc kubenswrapper[5070]: I1213 03:30:08.179885 5070 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ef4fcb6bea96ce2f13b0baeb17af9560d36010848c1a2f2cf95889ecfd9c795d" Dec 13 03:30:15 crc kubenswrapper[5070]: I1213 03:30:15.321646 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-6ff9d66794-qds4v" Dec 13 03:30:20 crc kubenswrapper[5070]: E1213 03:30:20.681567 5070 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified" Dec 13 03:30:20 crc kubenswrapper[5070]: E1213 03:30:20.682294 5070 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:barbican-db-sync,Image:quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified,Command:[/bin/bash],Args:[-c barbican-manage db upgrade],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/barbican/barbican.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-rhskf,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42403,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42403,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod barbican-db-sync-csqcx_openstack(759f7e37-a2f6-4a1b-a220-24397c94b928): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 13 03:30:20 crc kubenswrapper[5070]: E1213 03:30:20.683488 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/barbican-db-sync-csqcx" podUID="759f7e37-a2f6-4a1b-a220-24397c94b928" Dec 13 03:30:21 crc kubenswrapper[5070]: E1213 03:30:21.312283 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed 
to \"StartContainer\" for \"barbican-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified\\\"\"" pod="openstack/barbican-db-sync-csqcx" podUID="759f7e37-a2f6-4a1b-a220-24397c94b928" Dec 13 03:30:21 crc kubenswrapper[5070]: E1213 03:30:21.698418 5070 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/ubi9/httpd-24:latest" Dec 13 03:30:21 crc kubenswrapper[5070]: E1213 03:30:21.698631 5070 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:proxy-httpd,Image:registry.redhat.io/ubi9/httpd-24:latest,Command:[/usr/sbin/httpd],Args:[-DFOREGROUND],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:proxy-httpd,HostPort:0,ContainerPort:3000,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/httpd/conf/httpd.conf,SubPath:httpd.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/httpd/conf.d/ssl.conf,SubPath:ssl.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:run-httpd,ReadOnly:false,MountPath:/run/httpd,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:log-httpd,ReadOnly:false,MountPath:/var/log/httpd,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-729cc,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/,Port:{0 3000 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:300,TimeoutSeconds:30,PeriodSeconds:30,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/,Port:{0 3000 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:10,TimeoutSeconds:30,PeriodSeconds:30,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ceilometer-0_openstack(62471d23-7b26-439a-9a26-e65abe4be2c1): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 13 03:30:21 crc kubenswrapper[5070]: E1213 03:30:21.700584 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed 
to \"StartContainer\" for \"sg-core\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"proxy-httpd\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"]" pod="openstack/ceilometer-0" podUID="62471d23-7b26-439a-9a26-e65abe4be2c1" Dec 13 03:30:22 crc kubenswrapper[5070]: I1213 03:30:22.316744 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="62471d23-7b26-439a-9a26-e65abe4be2c1" containerName="ceilometer-central-agent" containerID="cri-o://fa180f2a9f89d575b0dbc31f469ceef043ded814f2ca2631bdd8c215ba12b2bb" gracePeriod=30 Dec 13 03:30:22 crc kubenswrapper[5070]: I1213 03:30:22.318694 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="62471d23-7b26-439a-9a26-e65abe4be2c1" containerName="ceilometer-notification-agent" containerID="cri-o://33cfaac0d264e2b7383afbcae98b1f49c91329b9f2e464dbe3a06848b14d524b" gracePeriod=30 Dec 13 03:30:22 crc kubenswrapper[5070]: E1213 03:30:22.966081 5070 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified" Dec 13 03:30:22 crc kubenswrapper[5070]: E1213 03:30:22.966233 5070 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:cinder-db-sync,Image:quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:etc-machine-id,ReadOnly:true,MountPath:/etc/machine-id,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/config-data/merged,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/cinder/cinder.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-9r6jl,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:nil,Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,A
llowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-db-sync-n7ll5_openstack(64c718c6-de73-4e08-9506-dce5dc9ebffd): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 13 03:30:22 crc kubenswrapper[5070]: E1213 03:30:22.967996 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/cinder-db-sync-n7ll5" podUID="64c718c6-de73-4e08-9506-dce5dc9ebffd" Dec 13 03:30:23 crc kubenswrapper[5070]: I1213 03:30:23.331159 5070 generic.go:334] "Generic (PLEG): container finished" podID="62471d23-7b26-439a-9a26-e65abe4be2c1" containerID="fa180f2a9f89d575b0dbc31f469ceef043ded814f2ca2631bdd8c215ba12b2bb" exitCode=0 Dec 13 03:30:23 crc kubenswrapper[5070]: I1213 03:30:23.331276 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"62471d23-7b26-439a-9a26-e65abe4be2c1","Type":"ContainerDied","Data":"fa180f2a9f89d575b0dbc31f469ceef043ded814f2ca2631bdd8c215ba12b2bb"} Dec 13 03:30:23 crc kubenswrapper[5070]: E1213 03:30:23.333039 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified\\\"\"" pod="openstack/cinder-db-sync-n7ll5" podUID="64c718c6-de73-4e08-9506-dce5dc9ebffd" Dec 13 03:30:26 crc kubenswrapper[5070]: I1213 03:30:26.359904 5070 generic.go:334] "Generic (PLEG): container finished" podID="62471d23-7b26-439a-9a26-e65abe4be2c1" containerID="33cfaac0d264e2b7383afbcae98b1f49c91329b9f2e464dbe3a06848b14d524b" exitCode=0 Dec 13 03:30:26 crc kubenswrapper[5070]: I1213 03:30:26.359953 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"62471d23-7b26-439a-9a26-e65abe4be2c1","Type":"ContainerDied","Data":"33cfaac0d264e2b7383afbcae98b1f49c91329b9f2e464dbe3a06848b14d524b"} Dec 13 03:30:26 crc kubenswrapper[5070]: I1213 03:30:26.528426 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 13 03:30:26 crc kubenswrapper[5070]: I1213 03:30:26.628265 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/62471d23-7b26-439a-9a26-e65abe4be2c1-combined-ca-bundle\") pod \"62471d23-7b26-439a-9a26-e65abe4be2c1\" (UID: \"62471d23-7b26-439a-9a26-e65abe4be2c1\") " Dec 13 03:30:26 crc kubenswrapper[5070]: I1213 03:30:26.628355 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/62471d23-7b26-439a-9a26-e65abe4be2c1-scripts\") pod \"62471d23-7b26-439a-9a26-e65abe4be2c1\" (UID: \"62471d23-7b26-439a-9a26-e65abe4be2c1\") " Dec 13 03:30:26 crc kubenswrapper[5070]: I1213 03:30:26.628387 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/62471d23-7b26-439a-9a26-e65abe4be2c1-run-httpd\") pod \"62471d23-7b26-439a-9a26-e65abe4be2c1\" (UID: \"62471d23-7b26-439a-9a26-e65abe4be2c1\") " Dec 13 03:30:26 crc kubenswrapper[5070]: I1213 03:30:26.628501 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/62471d23-7b26-439a-9a26-e65abe4be2c1-sg-core-conf-yaml\") pod \"62471d23-7b26-439a-9a26-e65abe4be2c1\" (UID: \"62471d23-7b26-439a-9a26-e65abe4be2c1\") " Dec 13 03:30:26 crc kubenswrapper[5070]: I1213 03:30:26.628532 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/62471d23-7b26-439a-9a26-e65abe4be2c1-log-httpd\") pod \"62471d23-7b26-439a-9a26-e65abe4be2c1\" (UID: \"62471d23-7b26-439a-9a26-e65abe4be2c1\") " Dec 13 03:30:26 crc kubenswrapper[5070]: I1213 03:30:26.628555 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/62471d23-7b26-439a-9a26-e65abe4be2c1-config-data\") pod \"62471d23-7b26-439a-9a26-e65abe4be2c1\" (UID: \"62471d23-7b26-439a-9a26-e65abe4be2c1\") " Dec 13 03:30:26 crc kubenswrapper[5070]: I1213 03:30:26.628607 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-729cc\" (UniqueName: \"kubernetes.io/projected/62471d23-7b26-439a-9a26-e65abe4be2c1-kube-api-access-729cc\") pod \"62471d23-7b26-439a-9a26-e65abe4be2c1\" (UID: \"62471d23-7b26-439a-9a26-e65abe4be2c1\") " Dec 13 03:30:26 crc kubenswrapper[5070]: I1213 03:30:26.629377 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/62471d23-7b26-439a-9a26-e65abe4be2c1-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "62471d23-7b26-439a-9a26-e65abe4be2c1" (UID: "62471d23-7b26-439a-9a26-e65abe4be2c1"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 03:30:26 crc kubenswrapper[5070]: I1213 03:30:26.629670 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/62471d23-7b26-439a-9a26-e65abe4be2c1-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "62471d23-7b26-439a-9a26-e65abe4be2c1" (UID: "62471d23-7b26-439a-9a26-e65abe4be2c1"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 03:30:26 crc kubenswrapper[5070]: I1213 03:30:26.634815 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/62471d23-7b26-439a-9a26-e65abe4be2c1-kube-api-access-729cc" (OuterVolumeSpecName: "kube-api-access-729cc") pod "62471d23-7b26-439a-9a26-e65abe4be2c1" (UID: "62471d23-7b26-439a-9a26-e65abe4be2c1"). InnerVolumeSpecName "kube-api-access-729cc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:30:26 crc kubenswrapper[5070]: I1213 03:30:26.635920 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/62471d23-7b26-439a-9a26-e65abe4be2c1-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "62471d23-7b26-439a-9a26-e65abe4be2c1" (UID: "62471d23-7b26-439a-9a26-e65abe4be2c1"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:30:26 crc kubenswrapper[5070]: I1213 03:30:26.635981 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/62471d23-7b26-439a-9a26-e65abe4be2c1-scripts" (OuterVolumeSpecName: "scripts") pod "62471d23-7b26-439a-9a26-e65abe4be2c1" (UID: "62471d23-7b26-439a-9a26-e65abe4be2c1"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:30:26 crc kubenswrapper[5070]: I1213 03:30:26.678846 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/62471d23-7b26-439a-9a26-e65abe4be2c1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "62471d23-7b26-439a-9a26-e65abe4be2c1" (UID: "62471d23-7b26-439a-9a26-e65abe4be2c1"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:30:26 crc kubenswrapper[5070]: I1213 03:30:26.684956 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/62471d23-7b26-439a-9a26-e65abe4be2c1-config-data" (OuterVolumeSpecName: "config-data") pod "62471d23-7b26-439a-9a26-e65abe4be2c1" (UID: "62471d23-7b26-439a-9a26-e65abe4be2c1"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:30:26 crc kubenswrapper[5070]: I1213 03:30:26.730744 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-729cc\" (UniqueName: \"kubernetes.io/projected/62471d23-7b26-439a-9a26-e65abe4be2c1-kube-api-access-729cc\") on node \"crc\" DevicePath \"\"" Dec 13 03:30:26 crc kubenswrapper[5070]: I1213 03:30:26.730996 5070 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/62471d23-7b26-439a-9a26-e65abe4be2c1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 03:30:26 crc kubenswrapper[5070]: I1213 03:30:26.731089 5070 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/62471d23-7b26-439a-9a26-e65abe4be2c1-scripts\") on node \"crc\" DevicePath \"\"" Dec 13 03:30:26 crc kubenswrapper[5070]: I1213 03:30:26.731178 5070 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/62471d23-7b26-439a-9a26-e65abe4be2c1-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 13 03:30:26 crc kubenswrapper[5070]: I1213 03:30:26.731266 5070 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/62471d23-7b26-439a-9a26-e65abe4be2c1-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 13 03:30:26 crc kubenswrapper[5070]: I1213 03:30:26.731355 5070 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/62471d23-7b26-439a-9a26-e65abe4be2c1-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 13 03:30:26 crc kubenswrapper[5070]: I1213 03:30:26.731493 5070 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/62471d23-7b26-439a-9a26-e65abe4be2c1-config-data\") on node \"crc\" DevicePath \"\"" Dec 13 03:30:27 crc kubenswrapper[5070]: I1213 03:30:27.374980 5070 generic.go:334] "Generic (PLEG): container finished" podID="e3c77f64-0733-4569-b3d9-f112d9e1d02b" containerID="4f20ee785171c7c656f65f747eb262791a9a0023f2a95864f119504c654dd5f8" exitCode=0 Dec 13 03:30:27 crc kubenswrapper[5070]: I1213 03:30:27.375075 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-vtvhk" event={"ID":"e3c77f64-0733-4569-b3d9-f112d9e1d02b","Type":"ContainerDied","Data":"4f20ee785171c7c656f65f747eb262791a9a0023f2a95864f119504c654dd5f8"} Dec 13 03:30:27 crc kubenswrapper[5070]: I1213 03:30:27.379627 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"62471d23-7b26-439a-9a26-e65abe4be2c1","Type":"ContainerDied","Data":"cfd54d3ab9e394543f50fd49ff7def0bd1f45003063c3275399f7d3559d97134"} Dec 13 03:30:27 crc kubenswrapper[5070]: I1213 03:30:27.379707 5070 scope.go:117] "RemoveContainer" containerID="33cfaac0d264e2b7383afbcae98b1f49c91329b9f2e464dbe3a06848b14d524b" Dec 13 03:30:27 crc kubenswrapper[5070]: I1213 03:30:27.379919 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 13 03:30:27 crc kubenswrapper[5070]: I1213 03:30:27.420220 5070 scope.go:117] "RemoveContainer" containerID="fa180f2a9f89d575b0dbc31f469ceef043ded814f2ca2631bdd8c215ba12b2bb" Dec 13 03:30:27 crc kubenswrapper[5070]: I1213 03:30:27.477682 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 13 03:30:27 crc kubenswrapper[5070]: I1213 03:30:27.487034 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 13 03:30:27 crc kubenswrapper[5070]: I1213 03:30:27.504082 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 13 03:30:27 crc kubenswrapper[5070]: E1213 03:30:27.504626 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="62471d23-7b26-439a-9a26-e65abe4be2c1" containerName="ceilometer-central-agent" Dec 13 03:30:27 crc kubenswrapper[5070]: I1213 03:30:27.504647 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="62471d23-7b26-439a-9a26-e65abe4be2c1" containerName="ceilometer-central-agent" Dec 13 03:30:27 crc kubenswrapper[5070]: E1213 03:30:27.504666 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7e2f5c73-7275-4235-8577-09c4c10cdfb4" containerName="collect-profiles" Dec 13 03:30:27 crc kubenswrapper[5070]: I1213 03:30:27.504674 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="7e2f5c73-7275-4235-8577-09c4c10cdfb4" containerName="collect-profiles" Dec 13 03:30:27 crc kubenswrapper[5070]: E1213 03:30:27.504717 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="62471d23-7b26-439a-9a26-e65abe4be2c1" containerName="ceilometer-notification-agent" Dec 13 03:30:27 crc kubenswrapper[5070]: I1213 03:30:27.504724 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="62471d23-7b26-439a-9a26-e65abe4be2c1" containerName="ceilometer-notification-agent" Dec 13 03:30:27 crc kubenswrapper[5070]: I1213 03:30:27.504913 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="7e2f5c73-7275-4235-8577-09c4c10cdfb4" containerName="collect-profiles" Dec 13 03:30:27 crc kubenswrapper[5070]: I1213 03:30:27.504953 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="62471d23-7b26-439a-9a26-e65abe4be2c1" containerName="ceilometer-notification-agent" Dec 13 03:30:27 crc kubenswrapper[5070]: I1213 03:30:27.504964 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="62471d23-7b26-439a-9a26-e65abe4be2c1" containerName="ceilometer-central-agent" Dec 13 03:30:27 crc kubenswrapper[5070]: I1213 03:30:27.508612 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 13 03:30:27 crc kubenswrapper[5070]: I1213 03:30:27.511279 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 13 03:30:27 crc kubenswrapper[5070]: I1213 03:30:27.511369 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 13 03:30:27 crc kubenswrapper[5070]: I1213 03:30:27.518865 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 13 03:30:27 crc kubenswrapper[5070]: I1213 03:30:27.646822 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8009852b-9899-44f8-95ee-bf5b03dd8fc7-log-httpd\") pod \"ceilometer-0\" (UID: \"8009852b-9899-44f8-95ee-bf5b03dd8fc7\") " pod="openstack/ceilometer-0" Dec 13 03:30:27 crc kubenswrapper[5070]: I1213 03:30:27.646957 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8009852b-9899-44f8-95ee-bf5b03dd8fc7-scripts\") pod \"ceilometer-0\" (UID: \"8009852b-9899-44f8-95ee-bf5b03dd8fc7\") " pod="openstack/ceilometer-0" Dec 13 03:30:27 crc kubenswrapper[5070]: I1213 03:30:27.647063 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8009852b-9899-44f8-95ee-bf5b03dd8fc7-config-data\") pod \"ceilometer-0\" (UID: \"8009852b-9899-44f8-95ee-bf5b03dd8fc7\") " pod="openstack/ceilometer-0" Dec 13 03:30:27 crc kubenswrapper[5070]: I1213 03:30:27.647093 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fvdh8\" (UniqueName: \"kubernetes.io/projected/8009852b-9899-44f8-95ee-bf5b03dd8fc7-kube-api-access-fvdh8\") pod \"ceilometer-0\" (UID: \"8009852b-9899-44f8-95ee-bf5b03dd8fc7\") " pod="openstack/ceilometer-0" Dec 13 03:30:27 crc kubenswrapper[5070]: I1213 03:30:27.647161 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8009852b-9899-44f8-95ee-bf5b03dd8fc7-run-httpd\") pod \"ceilometer-0\" (UID: \"8009852b-9899-44f8-95ee-bf5b03dd8fc7\") " pod="openstack/ceilometer-0" Dec 13 03:30:27 crc kubenswrapper[5070]: I1213 03:30:27.647188 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8009852b-9899-44f8-95ee-bf5b03dd8fc7-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"8009852b-9899-44f8-95ee-bf5b03dd8fc7\") " pod="openstack/ceilometer-0" Dec 13 03:30:27 crc kubenswrapper[5070]: I1213 03:30:27.647230 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8009852b-9899-44f8-95ee-bf5b03dd8fc7-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"8009852b-9899-44f8-95ee-bf5b03dd8fc7\") " pod="openstack/ceilometer-0" Dec 13 03:30:27 crc kubenswrapper[5070]: I1213 03:30:27.749386 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8009852b-9899-44f8-95ee-bf5b03dd8fc7-log-httpd\") pod \"ceilometer-0\" (UID: \"8009852b-9899-44f8-95ee-bf5b03dd8fc7\") " pod="openstack/ceilometer-0" Dec 13 03:30:27 crc kubenswrapper[5070]: I1213 03:30:27.749992 5070 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8009852b-9899-44f8-95ee-bf5b03dd8fc7-log-httpd\") pod \"ceilometer-0\" (UID: \"8009852b-9899-44f8-95ee-bf5b03dd8fc7\") " pod="openstack/ceilometer-0" Dec 13 03:30:27 crc kubenswrapper[5070]: I1213 03:30:27.750209 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8009852b-9899-44f8-95ee-bf5b03dd8fc7-scripts\") pod \"ceilometer-0\" (UID: \"8009852b-9899-44f8-95ee-bf5b03dd8fc7\") " pod="openstack/ceilometer-0" Dec 13 03:30:27 crc kubenswrapper[5070]: I1213 03:30:27.750274 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8009852b-9899-44f8-95ee-bf5b03dd8fc7-config-data\") pod \"ceilometer-0\" (UID: \"8009852b-9899-44f8-95ee-bf5b03dd8fc7\") " pod="openstack/ceilometer-0" Dec 13 03:30:27 crc kubenswrapper[5070]: I1213 03:30:27.750354 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fvdh8\" (UniqueName: \"kubernetes.io/projected/8009852b-9899-44f8-95ee-bf5b03dd8fc7-kube-api-access-fvdh8\") pod \"ceilometer-0\" (UID: \"8009852b-9899-44f8-95ee-bf5b03dd8fc7\") " pod="openstack/ceilometer-0" Dec 13 03:30:27 crc kubenswrapper[5070]: I1213 03:30:27.750376 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8009852b-9899-44f8-95ee-bf5b03dd8fc7-run-httpd\") pod \"ceilometer-0\" (UID: \"8009852b-9899-44f8-95ee-bf5b03dd8fc7\") " pod="openstack/ceilometer-0" Dec 13 03:30:27 crc kubenswrapper[5070]: I1213 03:30:27.751011 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8009852b-9899-44f8-95ee-bf5b03dd8fc7-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"8009852b-9899-44f8-95ee-bf5b03dd8fc7\") " pod="openstack/ceilometer-0" Dec 13 03:30:27 crc kubenswrapper[5070]: I1213 03:30:27.751085 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8009852b-9899-44f8-95ee-bf5b03dd8fc7-run-httpd\") pod \"ceilometer-0\" (UID: \"8009852b-9899-44f8-95ee-bf5b03dd8fc7\") " pod="openstack/ceilometer-0" Dec 13 03:30:27 crc kubenswrapper[5070]: I1213 03:30:27.751315 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8009852b-9899-44f8-95ee-bf5b03dd8fc7-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"8009852b-9899-44f8-95ee-bf5b03dd8fc7\") " pod="openstack/ceilometer-0" Dec 13 03:30:27 crc kubenswrapper[5070]: I1213 03:30:27.756743 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8009852b-9899-44f8-95ee-bf5b03dd8fc7-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"8009852b-9899-44f8-95ee-bf5b03dd8fc7\") " pod="openstack/ceilometer-0" Dec 13 03:30:27 crc kubenswrapper[5070]: I1213 03:30:27.757862 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8009852b-9899-44f8-95ee-bf5b03dd8fc7-config-data\") pod \"ceilometer-0\" (UID: \"8009852b-9899-44f8-95ee-bf5b03dd8fc7\") " pod="openstack/ceilometer-0" Dec 13 03:30:27 crc kubenswrapper[5070]: I1213 03:30:27.761855 5070 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8009852b-9899-44f8-95ee-bf5b03dd8fc7-scripts\") pod \"ceilometer-0\" (UID: \"8009852b-9899-44f8-95ee-bf5b03dd8fc7\") " pod="openstack/ceilometer-0" Dec 13 03:30:27 crc kubenswrapper[5070]: I1213 03:30:27.763164 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8009852b-9899-44f8-95ee-bf5b03dd8fc7-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"8009852b-9899-44f8-95ee-bf5b03dd8fc7\") " pod="openstack/ceilometer-0" Dec 13 03:30:27 crc kubenswrapper[5070]: I1213 03:30:27.768275 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fvdh8\" (UniqueName: \"kubernetes.io/projected/8009852b-9899-44f8-95ee-bf5b03dd8fc7-kube-api-access-fvdh8\") pod \"ceilometer-0\" (UID: \"8009852b-9899-44f8-95ee-bf5b03dd8fc7\") " pod="openstack/ceilometer-0" Dec 13 03:30:27 crc kubenswrapper[5070]: I1213 03:30:27.833844 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 13 03:30:28 crc kubenswrapper[5070]: I1213 03:30:28.101503 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 13 03:30:28 crc kubenswrapper[5070]: I1213 03:30:28.178856 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="62471d23-7b26-439a-9a26-e65abe4be2c1" path="/var/lib/kubelet/pods/62471d23-7b26-439a-9a26-e65abe4be2c1/volumes" Dec 13 03:30:28 crc kubenswrapper[5070]: I1213 03:30:28.393327 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8009852b-9899-44f8-95ee-bf5b03dd8fc7","Type":"ContainerStarted","Data":"a0c0ea59738082c2f3688da8eb19770a130f6ee1ecc70bc1c21247e2aee87b6d"} Dec 13 03:30:28 crc kubenswrapper[5070]: I1213 03:30:28.650992 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-vtvhk" Dec 13 03:30:28 crc kubenswrapper[5070]: I1213 03:30:28.764825 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/e3c77f64-0733-4569-b3d9-f112d9e1d02b-config\") pod \"e3c77f64-0733-4569-b3d9-f112d9e1d02b\" (UID: \"e3c77f64-0733-4569-b3d9-f112d9e1d02b\") " Dec 13 03:30:28 crc kubenswrapper[5070]: I1213 03:30:28.765074 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e3c77f64-0733-4569-b3d9-f112d9e1d02b-combined-ca-bundle\") pod \"e3c77f64-0733-4569-b3d9-f112d9e1d02b\" (UID: \"e3c77f64-0733-4569-b3d9-f112d9e1d02b\") " Dec 13 03:30:28 crc kubenswrapper[5070]: I1213 03:30:28.765141 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qwlr5\" (UniqueName: \"kubernetes.io/projected/e3c77f64-0733-4569-b3d9-f112d9e1d02b-kube-api-access-qwlr5\") pod \"e3c77f64-0733-4569-b3d9-f112d9e1d02b\" (UID: \"e3c77f64-0733-4569-b3d9-f112d9e1d02b\") " Dec 13 03:30:28 crc kubenswrapper[5070]: I1213 03:30:28.772770 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e3c77f64-0733-4569-b3d9-f112d9e1d02b-kube-api-access-qwlr5" (OuterVolumeSpecName: "kube-api-access-qwlr5") pod "e3c77f64-0733-4569-b3d9-f112d9e1d02b" (UID: "e3c77f64-0733-4569-b3d9-f112d9e1d02b"). InnerVolumeSpecName "kube-api-access-qwlr5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:30:28 crc kubenswrapper[5070]: I1213 03:30:28.799803 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e3c77f64-0733-4569-b3d9-f112d9e1d02b-config" (OuterVolumeSpecName: "config") pod "e3c77f64-0733-4569-b3d9-f112d9e1d02b" (UID: "e3c77f64-0733-4569-b3d9-f112d9e1d02b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:30:28 crc kubenswrapper[5070]: I1213 03:30:28.810016 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e3c77f64-0733-4569-b3d9-f112d9e1d02b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e3c77f64-0733-4569-b3d9-f112d9e1d02b" (UID: "e3c77f64-0733-4569-b3d9-f112d9e1d02b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:30:28 crc kubenswrapper[5070]: I1213 03:30:28.867599 5070 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e3c77f64-0733-4569-b3d9-f112d9e1d02b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 03:30:28 crc kubenswrapper[5070]: I1213 03:30:28.867636 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qwlr5\" (UniqueName: \"kubernetes.io/projected/e3c77f64-0733-4569-b3d9-f112d9e1d02b-kube-api-access-qwlr5\") on node \"crc\" DevicePath \"\"" Dec 13 03:30:28 crc kubenswrapper[5070]: I1213 03:30:28.867646 5070 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/e3c77f64-0733-4569-b3d9-f112d9e1d02b-config\") on node \"crc\" DevicePath \"\"" Dec 13 03:30:29 crc kubenswrapper[5070]: I1213 03:30:29.428086 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-vtvhk" event={"ID":"e3c77f64-0733-4569-b3d9-f112d9e1d02b","Type":"ContainerDied","Data":"e4fb2b9f2a1bd73488cf02220ff310808a13ff0011195c0fd8b5356f8b70af98"} Dec 13 03:30:29 crc kubenswrapper[5070]: I1213 03:30:29.428487 5070 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e4fb2b9f2a1bd73488cf02220ff310808a13ff0011195c0fd8b5356f8b70af98" Dec 13 03:30:29 crc kubenswrapper[5070]: I1213 03:30:29.428116 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-vtvhk" Dec 13 03:30:29 crc kubenswrapper[5070]: I1213 03:30:29.429739 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8009852b-9899-44f8-95ee-bf5b03dd8fc7","Type":"ContainerStarted","Data":"c5293ba7a9fdf3e19f95134512107b263db5b246987b5179b4dc4e9508270a04"} Dec 13 03:30:29 crc kubenswrapper[5070]: E1213 03:30:29.574105 5070 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode3c77f64_0733_4569_b3d9_f112d9e1d02b.slice\": RecentStats: unable to find data in memory cache]" Dec 13 03:30:29 crc kubenswrapper[5070]: I1213 03:30:29.686686 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5f66db59b9-89998"] Dec 13 03:30:29 crc kubenswrapper[5070]: E1213 03:30:29.687051 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e3c77f64-0733-4569-b3d9-f112d9e1d02b" containerName="neutron-db-sync" Dec 13 03:30:29 crc kubenswrapper[5070]: I1213 03:30:29.687073 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="e3c77f64-0733-4569-b3d9-f112d9e1d02b" containerName="neutron-db-sync" Dec 13 03:30:29 crc kubenswrapper[5070]: I1213 03:30:29.687292 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="e3c77f64-0733-4569-b3d9-f112d9e1d02b" containerName="neutron-db-sync" Dec 13 03:30:29 crc kubenswrapper[5070]: I1213 03:30:29.688163 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5f66db59b9-89998" Dec 13 03:30:29 crc kubenswrapper[5070]: I1213 03:30:29.758017 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5f66db59b9-89998"] Dec 13 03:30:29 crc kubenswrapper[5070]: I1213 03:30:29.774263 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-64f646c4fb-ktrsv"] Dec 13 03:30:29 crc kubenswrapper[5070]: I1213 03:30:29.781551 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-64f646c4fb-ktrsv" Dec 13 03:30:29 crc kubenswrapper[5070]: I1213 03:30:29.784149 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Dec 13 03:30:29 crc kubenswrapper[5070]: I1213 03:30:29.786976 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6e76ff7f-507c-4d7f-baf0-b1967eae5fff-dns-svc\") pod \"dnsmasq-dns-5f66db59b9-89998\" (UID: \"6e76ff7f-507c-4d7f-baf0-b1967eae5fff\") " pod="openstack/dnsmasq-dns-5f66db59b9-89998" Dec 13 03:30:29 crc kubenswrapper[5070]: I1213 03:30:29.787054 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6e76ff7f-507c-4d7f-baf0-b1967eae5fff-ovsdbserver-sb\") pod \"dnsmasq-dns-5f66db59b9-89998\" (UID: \"6e76ff7f-507c-4d7f-baf0-b1967eae5fff\") " pod="openstack/dnsmasq-dns-5f66db59b9-89998" Dec 13 03:30:29 crc kubenswrapper[5070]: I1213 03:30:29.787127 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cp87m\" (UniqueName: \"kubernetes.io/projected/6e76ff7f-507c-4d7f-baf0-b1967eae5fff-kube-api-access-cp87m\") pod \"dnsmasq-dns-5f66db59b9-89998\" (UID: \"6e76ff7f-507c-4d7f-baf0-b1967eae5fff\") " pod="openstack/dnsmasq-dns-5f66db59b9-89998" Dec 13 03:30:29 crc kubenswrapper[5070]: I1213 03:30:29.787152 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6e76ff7f-507c-4d7f-baf0-b1967eae5fff-ovsdbserver-nb\") pod \"dnsmasq-dns-5f66db59b9-89998\" (UID: \"6e76ff7f-507c-4d7f-baf0-b1967eae5fff\") " pod="openstack/dnsmasq-dns-5f66db59b9-89998" Dec 13 03:30:29 crc kubenswrapper[5070]: I1213 03:30:29.787238 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6e76ff7f-507c-4d7f-baf0-b1967eae5fff-config\") pod \"dnsmasq-dns-5f66db59b9-89998\" (UID: \"6e76ff7f-507c-4d7f-baf0-b1967eae5fff\") " pod="openstack/dnsmasq-dns-5f66db59b9-89998" Dec 13 03:30:29 crc kubenswrapper[5070]: I1213 03:30:29.789800 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-64f646c4fb-ktrsv"] Dec 13 03:30:29 crc kubenswrapper[5070]: I1213 03:30:29.791631 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-ovndbs" Dec 13 03:30:29 crc kubenswrapper[5070]: I1213 03:30:29.791910 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Dec 13 03:30:29 crc kubenswrapper[5070]: I1213 03:30:29.792162 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-vsg2q" Dec 13 03:30:29 crc kubenswrapper[5070]: I1213 03:30:29.888265 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6e76ff7f-507c-4d7f-baf0-b1967eae5fff-dns-svc\") pod \"dnsmasq-dns-5f66db59b9-89998\" (UID: \"6e76ff7f-507c-4d7f-baf0-b1967eae5fff\") " pod="openstack/dnsmasq-dns-5f66db59b9-89998" Dec 13 03:30:29 crc kubenswrapper[5070]: I1213 03:30:29.888346 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4wjh5\" (UniqueName: 
\"kubernetes.io/projected/d7d71324-fddc-4a3a-ab67-164b8d90b2d3-kube-api-access-4wjh5\") pod \"neutron-64f646c4fb-ktrsv\" (UID: \"d7d71324-fddc-4a3a-ab67-164b8d90b2d3\") " pod="openstack/neutron-64f646c4fb-ktrsv" Dec 13 03:30:29 crc kubenswrapper[5070]: I1213 03:30:29.888379 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/d7d71324-fddc-4a3a-ab67-164b8d90b2d3-httpd-config\") pod \"neutron-64f646c4fb-ktrsv\" (UID: \"d7d71324-fddc-4a3a-ab67-164b8d90b2d3\") " pod="openstack/neutron-64f646c4fb-ktrsv" Dec 13 03:30:29 crc kubenswrapper[5070]: I1213 03:30:29.888410 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/d7d71324-fddc-4a3a-ab67-164b8d90b2d3-ovndb-tls-certs\") pod \"neutron-64f646c4fb-ktrsv\" (UID: \"d7d71324-fddc-4a3a-ab67-164b8d90b2d3\") " pod="openstack/neutron-64f646c4fb-ktrsv" Dec 13 03:30:29 crc kubenswrapper[5070]: I1213 03:30:29.888482 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7d71324-fddc-4a3a-ab67-164b8d90b2d3-combined-ca-bundle\") pod \"neutron-64f646c4fb-ktrsv\" (UID: \"d7d71324-fddc-4a3a-ab67-164b8d90b2d3\") " pod="openstack/neutron-64f646c4fb-ktrsv" Dec 13 03:30:29 crc kubenswrapper[5070]: I1213 03:30:29.888512 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6e76ff7f-507c-4d7f-baf0-b1967eae5fff-ovsdbserver-sb\") pod \"dnsmasq-dns-5f66db59b9-89998\" (UID: \"6e76ff7f-507c-4d7f-baf0-b1967eae5fff\") " pod="openstack/dnsmasq-dns-5f66db59b9-89998" Dec 13 03:30:29 crc kubenswrapper[5070]: I1213 03:30:29.888588 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/d7d71324-fddc-4a3a-ab67-164b8d90b2d3-config\") pod \"neutron-64f646c4fb-ktrsv\" (UID: \"d7d71324-fddc-4a3a-ab67-164b8d90b2d3\") " pod="openstack/neutron-64f646c4fb-ktrsv" Dec 13 03:30:29 crc kubenswrapper[5070]: I1213 03:30:29.888634 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cp87m\" (UniqueName: \"kubernetes.io/projected/6e76ff7f-507c-4d7f-baf0-b1967eae5fff-kube-api-access-cp87m\") pod \"dnsmasq-dns-5f66db59b9-89998\" (UID: \"6e76ff7f-507c-4d7f-baf0-b1967eae5fff\") " pod="openstack/dnsmasq-dns-5f66db59b9-89998" Dec 13 03:30:29 crc kubenswrapper[5070]: I1213 03:30:29.888670 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6e76ff7f-507c-4d7f-baf0-b1967eae5fff-ovsdbserver-nb\") pod \"dnsmasq-dns-5f66db59b9-89998\" (UID: \"6e76ff7f-507c-4d7f-baf0-b1967eae5fff\") " pod="openstack/dnsmasq-dns-5f66db59b9-89998" Dec 13 03:30:29 crc kubenswrapper[5070]: I1213 03:30:29.888707 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6e76ff7f-507c-4d7f-baf0-b1967eae5fff-config\") pod \"dnsmasq-dns-5f66db59b9-89998\" (UID: \"6e76ff7f-507c-4d7f-baf0-b1967eae5fff\") " pod="openstack/dnsmasq-dns-5f66db59b9-89998" Dec 13 03:30:29 crc kubenswrapper[5070]: I1213 03:30:29.889598 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/6e76ff7f-507c-4d7f-baf0-b1967eae5fff-config\") pod \"dnsmasq-dns-5f66db59b9-89998\" (UID: \"6e76ff7f-507c-4d7f-baf0-b1967eae5fff\") " pod="openstack/dnsmasq-dns-5f66db59b9-89998" Dec 13 03:30:29 crc kubenswrapper[5070]: I1213 03:30:29.890164 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6e76ff7f-507c-4d7f-baf0-b1967eae5fff-ovsdbserver-sb\") pod \"dnsmasq-dns-5f66db59b9-89998\" (UID: \"6e76ff7f-507c-4d7f-baf0-b1967eae5fff\") " pod="openstack/dnsmasq-dns-5f66db59b9-89998" Dec 13 03:30:29 crc kubenswrapper[5070]: I1213 03:30:29.890963 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6e76ff7f-507c-4d7f-baf0-b1967eae5fff-ovsdbserver-nb\") pod \"dnsmasq-dns-5f66db59b9-89998\" (UID: \"6e76ff7f-507c-4d7f-baf0-b1967eae5fff\") " pod="openstack/dnsmasq-dns-5f66db59b9-89998" Dec 13 03:30:29 crc kubenswrapper[5070]: I1213 03:30:29.891213 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6e76ff7f-507c-4d7f-baf0-b1967eae5fff-dns-svc\") pod \"dnsmasq-dns-5f66db59b9-89998\" (UID: \"6e76ff7f-507c-4d7f-baf0-b1967eae5fff\") " pod="openstack/dnsmasq-dns-5f66db59b9-89998" Dec 13 03:30:29 crc kubenswrapper[5070]: I1213 03:30:29.916517 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cp87m\" (UniqueName: \"kubernetes.io/projected/6e76ff7f-507c-4d7f-baf0-b1967eae5fff-kube-api-access-cp87m\") pod \"dnsmasq-dns-5f66db59b9-89998\" (UID: \"6e76ff7f-507c-4d7f-baf0-b1967eae5fff\") " pod="openstack/dnsmasq-dns-5f66db59b9-89998" Dec 13 03:30:29 crc kubenswrapper[5070]: I1213 03:30:29.948421 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/keystone-58cd5c7d4f-c98c8" Dec 13 03:30:30 crc kubenswrapper[5070]: I1213 03:30:30.002225 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/d7d71324-fddc-4a3a-ab67-164b8d90b2d3-ovndb-tls-certs\") pod \"neutron-64f646c4fb-ktrsv\" (UID: \"d7d71324-fddc-4a3a-ab67-164b8d90b2d3\") " pod="openstack/neutron-64f646c4fb-ktrsv" Dec 13 03:30:30 crc kubenswrapper[5070]: I1213 03:30:30.002570 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7d71324-fddc-4a3a-ab67-164b8d90b2d3-combined-ca-bundle\") pod \"neutron-64f646c4fb-ktrsv\" (UID: \"d7d71324-fddc-4a3a-ab67-164b8d90b2d3\") " pod="openstack/neutron-64f646c4fb-ktrsv" Dec 13 03:30:30 crc kubenswrapper[5070]: I1213 03:30:30.002637 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/d7d71324-fddc-4a3a-ab67-164b8d90b2d3-config\") pod \"neutron-64f646c4fb-ktrsv\" (UID: \"d7d71324-fddc-4a3a-ab67-164b8d90b2d3\") " pod="openstack/neutron-64f646c4fb-ktrsv" Dec 13 03:30:30 crc kubenswrapper[5070]: I1213 03:30:30.002706 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4wjh5\" (UniqueName: \"kubernetes.io/projected/d7d71324-fddc-4a3a-ab67-164b8d90b2d3-kube-api-access-4wjh5\") pod \"neutron-64f646c4fb-ktrsv\" (UID: \"d7d71324-fddc-4a3a-ab67-164b8d90b2d3\") " pod="openstack/neutron-64f646c4fb-ktrsv" Dec 13 03:30:30 crc kubenswrapper[5070]: I1213 03:30:30.002723 5070 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/d7d71324-fddc-4a3a-ab67-164b8d90b2d3-httpd-config\") pod \"neutron-64f646c4fb-ktrsv\" (UID: \"d7d71324-fddc-4a3a-ab67-164b8d90b2d3\") " pod="openstack/neutron-64f646c4fb-ktrsv" Dec 13 03:30:30 crc kubenswrapper[5070]: I1213 03:30:30.009203 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5f66db59b9-89998" Dec 13 03:30:30 crc kubenswrapper[5070]: I1213 03:30:30.010270 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7d71324-fddc-4a3a-ab67-164b8d90b2d3-combined-ca-bundle\") pod \"neutron-64f646c4fb-ktrsv\" (UID: \"d7d71324-fddc-4a3a-ab67-164b8d90b2d3\") " pod="openstack/neutron-64f646c4fb-ktrsv" Dec 13 03:30:30 crc kubenswrapper[5070]: I1213 03:30:30.011044 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/d7d71324-fddc-4a3a-ab67-164b8d90b2d3-ovndb-tls-certs\") pod \"neutron-64f646c4fb-ktrsv\" (UID: \"d7d71324-fddc-4a3a-ab67-164b8d90b2d3\") " pod="openstack/neutron-64f646c4fb-ktrsv" Dec 13 03:30:30 crc kubenswrapper[5070]: I1213 03:30:30.016541 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/d7d71324-fddc-4a3a-ab67-164b8d90b2d3-config\") pod \"neutron-64f646c4fb-ktrsv\" (UID: \"d7d71324-fddc-4a3a-ab67-164b8d90b2d3\") " pod="openstack/neutron-64f646c4fb-ktrsv" Dec 13 03:30:30 crc kubenswrapper[5070]: I1213 03:30:30.019632 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/d7d71324-fddc-4a3a-ab67-164b8d90b2d3-httpd-config\") pod \"neutron-64f646c4fb-ktrsv\" (UID: \"d7d71324-fddc-4a3a-ab67-164b8d90b2d3\") " pod="openstack/neutron-64f646c4fb-ktrsv" Dec 13 03:30:30 crc kubenswrapper[5070]: I1213 03:30:30.032554 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4wjh5\" (UniqueName: \"kubernetes.io/projected/d7d71324-fddc-4a3a-ab67-164b8d90b2d3-kube-api-access-4wjh5\") pod \"neutron-64f646c4fb-ktrsv\" (UID: \"d7d71324-fddc-4a3a-ab67-164b8d90b2d3\") " pod="openstack/neutron-64f646c4fb-ktrsv" Dec 13 03:30:30 crc kubenswrapper[5070]: I1213 03:30:30.122481 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-64f646c4fb-ktrsv" Dec 13 03:30:30 crc kubenswrapper[5070]: I1213 03:30:30.439104 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8009852b-9899-44f8-95ee-bf5b03dd8fc7","Type":"ContainerStarted","Data":"74fcad3ecac10ec8f2db5f8bda2e9b7ab772d48f73600a5f4cb1c53cb754ce1c"} Dec 13 03:30:30 crc kubenswrapper[5070]: I1213 03:30:30.439323 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8009852b-9899-44f8-95ee-bf5b03dd8fc7","Type":"ContainerStarted","Data":"ae9ae3f7f2f1a70863f552bdc7c330ac3d5ca3a8e39d057bf3436ae3e059b270"} Dec 13 03:30:30 crc kubenswrapper[5070]: I1213 03:30:30.582582 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5f66db59b9-89998"] Dec 13 03:30:30 crc kubenswrapper[5070]: I1213 03:30:30.813353 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Dec 13 03:30:30 crc kubenswrapper[5070]: I1213 03:30:30.814915 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Dec 13 03:30:30 crc kubenswrapper[5070]: I1213 03:30:30.818530 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret" Dec 13 03:30:30 crc kubenswrapper[5070]: I1213 03:30:30.818740 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-8nzhm" Dec 13 03:30:30 crc kubenswrapper[5070]: I1213 03:30:30.819016 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config" Dec 13 03:30:30 crc kubenswrapper[5070]: I1213 03:30:30.824605 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Dec 13 03:30:30 crc kubenswrapper[5070]: I1213 03:30:30.899839 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-64f646c4fb-ktrsv"] Dec 13 03:30:30 crc kubenswrapper[5070]: I1213 03:30:30.925287 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hcntm\" (UniqueName: \"kubernetes.io/projected/661791e7-2fbe-4f8d-88f2-88418da79df8-kube-api-access-hcntm\") pod \"openstackclient\" (UID: \"661791e7-2fbe-4f8d-88f2-88418da79df8\") " pod="openstack/openstackclient" Dec 13 03:30:30 crc kubenswrapper[5070]: I1213 03:30:30.925341 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/661791e7-2fbe-4f8d-88f2-88418da79df8-combined-ca-bundle\") pod \"openstackclient\" (UID: \"661791e7-2fbe-4f8d-88f2-88418da79df8\") " pod="openstack/openstackclient" Dec 13 03:30:30 crc kubenswrapper[5070]: I1213 03:30:30.925363 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/661791e7-2fbe-4f8d-88f2-88418da79df8-openstack-config\") pod \"openstackclient\" (UID: \"661791e7-2fbe-4f8d-88f2-88418da79df8\") " pod="openstack/openstackclient" Dec 13 03:30:30 crc kubenswrapper[5070]: I1213 03:30:30.925395 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/661791e7-2fbe-4f8d-88f2-88418da79df8-openstack-config-secret\") pod \"openstackclient\" (UID: \"661791e7-2fbe-4f8d-88f2-88418da79df8\") " pod="openstack/openstackclient" Dec 13 03:30:31 crc kubenswrapper[5070]: I1213 03:30:31.027612 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hcntm\" (UniqueName: \"kubernetes.io/projected/661791e7-2fbe-4f8d-88f2-88418da79df8-kube-api-access-hcntm\") pod \"openstackclient\" (UID: \"661791e7-2fbe-4f8d-88f2-88418da79df8\") " pod="openstack/openstackclient" Dec 13 03:30:31 crc kubenswrapper[5070]: I1213 03:30:31.027974 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/661791e7-2fbe-4f8d-88f2-88418da79df8-combined-ca-bundle\") pod \"openstackclient\" (UID: \"661791e7-2fbe-4f8d-88f2-88418da79df8\") " pod="openstack/openstackclient" Dec 13 03:30:31 crc kubenswrapper[5070]: I1213 03:30:31.027999 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/661791e7-2fbe-4f8d-88f2-88418da79df8-openstack-config\") pod \"openstackclient\" (UID: \"661791e7-2fbe-4f8d-88f2-88418da79df8\") " 
pod="openstack/openstackclient" Dec 13 03:30:31 crc kubenswrapper[5070]: I1213 03:30:31.028575 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/661791e7-2fbe-4f8d-88f2-88418da79df8-openstack-config-secret\") pod \"openstackclient\" (UID: \"661791e7-2fbe-4f8d-88f2-88418da79df8\") " pod="openstack/openstackclient" Dec 13 03:30:31 crc kubenswrapper[5070]: I1213 03:30:31.029128 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/661791e7-2fbe-4f8d-88f2-88418da79df8-openstack-config\") pod \"openstackclient\" (UID: \"661791e7-2fbe-4f8d-88f2-88418da79df8\") " pod="openstack/openstackclient" Dec 13 03:30:31 crc kubenswrapper[5070]: I1213 03:30:31.031927 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/661791e7-2fbe-4f8d-88f2-88418da79df8-openstack-config-secret\") pod \"openstackclient\" (UID: \"661791e7-2fbe-4f8d-88f2-88418da79df8\") " pod="openstack/openstackclient" Dec 13 03:30:31 crc kubenswrapper[5070]: I1213 03:30:31.032007 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/661791e7-2fbe-4f8d-88f2-88418da79df8-combined-ca-bundle\") pod \"openstackclient\" (UID: \"661791e7-2fbe-4f8d-88f2-88418da79df8\") " pod="openstack/openstackclient" Dec 13 03:30:31 crc kubenswrapper[5070]: I1213 03:30:31.048952 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hcntm\" (UniqueName: \"kubernetes.io/projected/661791e7-2fbe-4f8d-88f2-88418da79df8-kube-api-access-hcntm\") pod \"openstackclient\" (UID: \"661791e7-2fbe-4f8d-88f2-88418da79df8\") " pod="openstack/openstackclient" Dec 13 03:30:31 crc kubenswrapper[5070]: I1213 03:30:31.111255 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstackclient"] Dec 13 03:30:31 crc kubenswrapper[5070]: I1213 03:30:31.112365 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Dec 13 03:30:31 crc kubenswrapper[5070]: I1213 03:30:31.118383 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstackclient"] Dec 13 03:30:31 crc kubenswrapper[5070]: I1213 03:30:31.156432 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Dec 13 03:30:31 crc kubenswrapper[5070]: I1213 03:30:31.157793 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Dec 13 03:30:31 crc kubenswrapper[5070]: I1213 03:30:31.168422 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Dec 13 03:30:31 crc kubenswrapper[5070]: I1213 03:30:31.232999 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bf8gg\" (UniqueName: \"kubernetes.io/projected/0d0b3d1f-385d-47e5-af36-d016c5b9cd1b-kube-api-access-bf8gg\") pod \"openstackclient\" (UID: \"0d0b3d1f-385d-47e5-af36-d016c5b9cd1b\") " pod="openstack/openstackclient" Dec 13 03:30:31 crc kubenswrapper[5070]: I1213 03:30:31.233078 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/0d0b3d1f-385d-47e5-af36-d016c5b9cd1b-openstack-config\") pod \"openstackclient\" (UID: \"0d0b3d1f-385d-47e5-af36-d016c5b9cd1b\") " pod="openstack/openstackclient" Dec 13 03:30:31 crc kubenswrapper[5070]: I1213 03:30:31.234594 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d0b3d1f-385d-47e5-af36-d016c5b9cd1b-combined-ca-bundle\") pod \"openstackclient\" (UID: \"0d0b3d1f-385d-47e5-af36-d016c5b9cd1b\") " pod="openstack/openstackclient" Dec 13 03:30:31 crc kubenswrapper[5070]: I1213 03:30:31.234898 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/0d0b3d1f-385d-47e5-af36-d016c5b9cd1b-openstack-config-secret\") pod \"openstackclient\" (UID: \"0d0b3d1f-385d-47e5-af36-d016c5b9cd1b\") " pod="openstack/openstackclient" Dec 13 03:30:31 crc kubenswrapper[5070]: E1213 03:30:31.288492 5070 log.go:32] "RunPodSandbox from runtime service failed" err=< Dec 13 03:30:31 crc kubenswrapper[5070]: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_openstackclient_openstack_661791e7-2fbe-4f8d-88f2-88418da79df8_0(eed0c05ce3f887b33bf7bb340d6a0dd085f9ca2e436e9c4ca3ef5128a0f39498): error adding pod openstack_openstackclient to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"eed0c05ce3f887b33bf7bb340d6a0dd085f9ca2e436e9c4ca3ef5128a0f39498" Netns:"/var/run/netns/63f64281-8e6e-4737-a341-3b9c02f80aad" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openstack;K8S_POD_NAME=openstackclient;K8S_POD_INFRA_CONTAINER_ID=eed0c05ce3f887b33bf7bb340d6a0dd085f9ca2e436e9c4ca3ef5128a0f39498;K8S_POD_UID=661791e7-2fbe-4f8d-88f2-88418da79df8" Path:"" ERRORED: error configuring pod [openstack/openstackclient] networking: Multus: [openstack/openstackclient/661791e7-2fbe-4f8d-88f2-88418da79df8]: expected pod UID "661791e7-2fbe-4f8d-88f2-88418da79df8" but got "0d0b3d1f-385d-47e5-af36-d016c5b9cd1b" from Kube API Dec 13 03:30:31 crc kubenswrapper[5070]: ': StdinData: {"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"} Dec 13 03:30:31 crc kubenswrapper[5070]: > Dec 13 03:30:31 crc kubenswrapper[5070]: E1213 03:30:31.288567 5070 kuberuntime_sandbox.go:72] 
"Failed to create sandbox for pod" err=< Dec 13 03:30:31 crc kubenswrapper[5070]: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_openstackclient_openstack_661791e7-2fbe-4f8d-88f2-88418da79df8_0(eed0c05ce3f887b33bf7bb340d6a0dd085f9ca2e436e9c4ca3ef5128a0f39498): error adding pod openstack_openstackclient to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"eed0c05ce3f887b33bf7bb340d6a0dd085f9ca2e436e9c4ca3ef5128a0f39498" Netns:"/var/run/netns/63f64281-8e6e-4737-a341-3b9c02f80aad" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openstack;K8S_POD_NAME=openstackclient;K8S_POD_INFRA_CONTAINER_ID=eed0c05ce3f887b33bf7bb340d6a0dd085f9ca2e436e9c4ca3ef5128a0f39498;K8S_POD_UID=661791e7-2fbe-4f8d-88f2-88418da79df8" Path:"" ERRORED: error configuring pod [openstack/openstackclient] networking: Multus: [openstack/openstackclient/661791e7-2fbe-4f8d-88f2-88418da79df8]: expected pod UID "661791e7-2fbe-4f8d-88f2-88418da79df8" but got "0d0b3d1f-385d-47e5-af36-d016c5b9cd1b" from Kube API Dec 13 03:30:31 crc kubenswrapper[5070]: ': StdinData: {"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"} Dec 13 03:30:31 crc kubenswrapper[5070]: > pod="openstack/openstackclient" Dec 13 03:30:31 crc kubenswrapper[5070]: I1213 03:30:31.337823 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bf8gg\" (UniqueName: \"kubernetes.io/projected/0d0b3d1f-385d-47e5-af36-d016c5b9cd1b-kube-api-access-bf8gg\") pod \"openstackclient\" (UID: \"0d0b3d1f-385d-47e5-af36-d016c5b9cd1b\") " pod="openstack/openstackclient" Dec 13 03:30:31 crc kubenswrapper[5070]: I1213 03:30:31.337885 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/0d0b3d1f-385d-47e5-af36-d016c5b9cd1b-openstack-config\") pod \"openstackclient\" (UID: \"0d0b3d1f-385d-47e5-af36-d016c5b9cd1b\") " pod="openstack/openstackclient" Dec 13 03:30:31 crc kubenswrapper[5070]: I1213 03:30:31.337928 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d0b3d1f-385d-47e5-af36-d016c5b9cd1b-combined-ca-bundle\") pod \"openstackclient\" (UID: \"0d0b3d1f-385d-47e5-af36-d016c5b9cd1b\") " pod="openstack/openstackclient" Dec 13 03:30:31 crc kubenswrapper[5070]: I1213 03:30:31.338035 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/0d0b3d1f-385d-47e5-af36-d016c5b9cd1b-openstack-config-secret\") pod \"openstackclient\" (UID: \"0d0b3d1f-385d-47e5-af36-d016c5b9cd1b\") " pod="openstack/openstackclient" Dec 13 03:30:31 crc kubenswrapper[5070]: I1213 03:30:31.342406 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/0d0b3d1f-385d-47e5-af36-d016c5b9cd1b-openstack-config-secret\") pod \"openstackclient\" (UID: \"0d0b3d1f-385d-47e5-af36-d016c5b9cd1b\") " pod="openstack/openstackclient" Dec 13 03:30:31 crc kubenswrapper[5070]: I1213 03:30:31.345000 5070 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/0d0b3d1f-385d-47e5-af36-d016c5b9cd1b-openstack-config\") pod \"openstackclient\" (UID: \"0d0b3d1f-385d-47e5-af36-d016c5b9cd1b\") " pod="openstack/openstackclient" Dec 13 03:30:31 crc kubenswrapper[5070]: I1213 03:30:31.351237 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d0b3d1f-385d-47e5-af36-d016c5b9cd1b-combined-ca-bundle\") pod \"openstackclient\" (UID: \"0d0b3d1f-385d-47e5-af36-d016c5b9cd1b\") " pod="openstack/openstackclient" Dec 13 03:30:31 crc kubenswrapper[5070]: I1213 03:30:31.365297 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bf8gg\" (UniqueName: \"kubernetes.io/projected/0d0b3d1f-385d-47e5-af36-d016c5b9cd1b-kube-api-access-bf8gg\") pod \"openstackclient\" (UID: \"0d0b3d1f-385d-47e5-af36-d016c5b9cd1b\") " pod="openstack/openstackclient" Dec 13 03:30:31 crc kubenswrapper[5070]: I1213 03:30:31.450485 5070 generic.go:334] "Generic (PLEG): container finished" podID="6e76ff7f-507c-4d7f-baf0-b1967eae5fff" containerID="ea49936432c1ff6650754b48500ee832625315fe5d6f606827d5dbf62ff16a94" exitCode=0 Dec 13 03:30:31 crc kubenswrapper[5070]: I1213 03:30:31.450539 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f66db59b9-89998" event={"ID":"6e76ff7f-507c-4d7f-baf0-b1967eae5fff","Type":"ContainerDied","Data":"ea49936432c1ff6650754b48500ee832625315fe5d6f606827d5dbf62ff16a94"} Dec 13 03:30:31 crc kubenswrapper[5070]: I1213 03:30:31.450566 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f66db59b9-89998" event={"ID":"6e76ff7f-507c-4d7f-baf0-b1967eae5fff","Type":"ContainerStarted","Data":"afdfdb49e2d63132f8248afd5540b5360242547ce02cffdceec60b24616d14ac"} Dec 13 03:30:31 crc kubenswrapper[5070]: I1213 03:30:31.455750 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Dec 13 03:30:31 crc kubenswrapper[5070]: I1213 03:30:31.455811 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-64f646c4fb-ktrsv" event={"ID":"d7d71324-fddc-4a3a-ab67-164b8d90b2d3","Type":"ContainerStarted","Data":"a54a2e955030fa01e6711e092c1c18c7b1e5d91787c10c59117de8ab23fd71c1"} Dec 13 03:30:31 crc kubenswrapper[5070]: I1213 03:30:31.455843 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-64f646c4fb-ktrsv" event={"ID":"d7d71324-fddc-4a3a-ab67-164b8d90b2d3","Type":"ContainerStarted","Data":"9b5413f46bce5d9fb208db75baa625809e81164c0f0a7c77e14239e069537396"} Dec 13 03:30:31 crc kubenswrapper[5070]: I1213 03:30:31.455878 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-64f646c4fb-ktrsv" event={"ID":"d7d71324-fddc-4a3a-ab67-164b8d90b2d3","Type":"ContainerStarted","Data":"7e62ff28075d74ff673f742def0d62d2df3ac933d49687123c191c212a95b1df"} Dec 13 03:30:31 crc kubenswrapper[5070]: I1213 03:30:31.461951 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-64f646c4fb-ktrsv" Dec 13 03:30:31 crc kubenswrapper[5070]: I1213 03:30:31.473773 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Dec 13 03:30:31 crc kubenswrapper[5070]: I1213 03:30:31.478302 5070 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="661791e7-2fbe-4f8d-88f2-88418da79df8" podUID="0d0b3d1f-385d-47e5-af36-d016c5b9cd1b" Dec 13 03:30:31 crc kubenswrapper[5070]: I1213 03:30:31.486765 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Dec 13 03:30:31 crc kubenswrapper[5070]: I1213 03:30:31.506724 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-64f646c4fb-ktrsv" podStartSLOduration=2.506704474 podStartE2EDuration="2.506704474s" podCreationTimestamp="2025-12-13 03:30:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 03:30:31.496622058 +0000 UTC m=+1123.732465624" watchObservedRunningTime="2025-12-13 03:30:31.506704474 +0000 UTC m=+1123.742548020" Dec 13 03:30:31 crc kubenswrapper[5070]: I1213 03:30:31.544181 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hcntm\" (UniqueName: \"kubernetes.io/projected/661791e7-2fbe-4f8d-88f2-88418da79df8-kube-api-access-hcntm\") pod \"661791e7-2fbe-4f8d-88f2-88418da79df8\" (UID: \"661791e7-2fbe-4f8d-88f2-88418da79df8\") " Dec 13 03:30:31 crc kubenswrapper[5070]: I1213 03:30:31.544272 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/661791e7-2fbe-4f8d-88f2-88418da79df8-combined-ca-bundle\") pod \"661791e7-2fbe-4f8d-88f2-88418da79df8\" (UID: \"661791e7-2fbe-4f8d-88f2-88418da79df8\") " Dec 13 03:30:31 crc kubenswrapper[5070]: I1213 03:30:31.544311 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/661791e7-2fbe-4f8d-88f2-88418da79df8-openstack-config-secret\") pod \"661791e7-2fbe-4f8d-88f2-88418da79df8\" (UID: \"661791e7-2fbe-4f8d-88f2-88418da79df8\") " Dec 13 03:30:31 crc kubenswrapper[5070]: I1213 03:30:31.544537 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/661791e7-2fbe-4f8d-88f2-88418da79df8-openstack-config\") pod \"661791e7-2fbe-4f8d-88f2-88418da79df8\" (UID: \"661791e7-2fbe-4f8d-88f2-88418da79df8\") " Dec 13 03:30:31 crc kubenswrapper[5070]: I1213 03:30:31.548142 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/661791e7-2fbe-4f8d-88f2-88418da79df8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "661791e7-2fbe-4f8d-88f2-88418da79df8" (UID: "661791e7-2fbe-4f8d-88f2-88418da79df8"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:30:31 crc kubenswrapper[5070]: I1213 03:30:31.549709 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/661791e7-2fbe-4f8d-88f2-88418da79df8-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "661791e7-2fbe-4f8d-88f2-88418da79df8" (UID: "661791e7-2fbe-4f8d-88f2-88418da79df8"). InnerVolumeSpecName "openstack-config-secret". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:30:31 crc kubenswrapper[5070]: I1213 03:30:31.550557 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/661791e7-2fbe-4f8d-88f2-88418da79df8-kube-api-access-hcntm" (OuterVolumeSpecName: "kube-api-access-hcntm") pod "661791e7-2fbe-4f8d-88f2-88418da79df8" (UID: "661791e7-2fbe-4f8d-88f2-88418da79df8"). InnerVolumeSpecName "kube-api-access-hcntm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:30:31 crc kubenswrapper[5070]: I1213 03:30:31.552663 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/661791e7-2fbe-4f8d-88f2-88418da79df8-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "661791e7-2fbe-4f8d-88f2-88418da79df8" (UID: "661791e7-2fbe-4f8d-88f2-88418da79df8"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:30:31 crc kubenswrapper[5070]: I1213 03:30:31.650515 5070 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/661791e7-2fbe-4f8d-88f2-88418da79df8-openstack-config\") on node \"crc\" DevicePath \"\"" Dec 13 03:30:31 crc kubenswrapper[5070]: I1213 03:30:31.650548 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hcntm\" (UniqueName: \"kubernetes.io/projected/661791e7-2fbe-4f8d-88f2-88418da79df8-kube-api-access-hcntm\") on node \"crc\" DevicePath \"\"" Dec 13 03:30:31 crc kubenswrapper[5070]: I1213 03:30:31.650559 5070 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/661791e7-2fbe-4f8d-88f2-88418da79df8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 03:30:31 crc kubenswrapper[5070]: I1213 03:30:31.650568 5070 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/661791e7-2fbe-4f8d-88f2-88418da79df8-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Dec 13 03:30:31 crc kubenswrapper[5070]: I1213 03:30:31.903514 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-54586d498f-pgsrg"] Dec 13 03:30:31 crc kubenswrapper[5070]: I1213 03:30:31.905232 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-54586d498f-pgsrg" Dec 13 03:30:31 crc kubenswrapper[5070]: I1213 03:30:31.909067 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-public-svc" Dec 13 03:30:31 crc kubenswrapper[5070]: I1213 03:30:31.909392 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-internal-svc" Dec 13 03:30:31 crc kubenswrapper[5070]: I1213 03:30:31.917458 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-54586d498f-pgsrg"] Dec 13 03:30:32 crc kubenswrapper[5070]: I1213 03:30:32.067805 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/93c84134-01b6-453a-95d9-7f2a3e9a4f3d-ovndb-tls-certs\") pod \"neutron-54586d498f-pgsrg\" (UID: \"93c84134-01b6-453a-95d9-7f2a3e9a4f3d\") " pod="openstack/neutron-54586d498f-pgsrg" Dec 13 03:30:32 crc kubenswrapper[5070]: I1213 03:30:32.067861 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/93c84134-01b6-453a-95d9-7f2a3e9a4f3d-config\") pod \"neutron-54586d498f-pgsrg\" (UID: \"93c84134-01b6-453a-95d9-7f2a3e9a4f3d\") " pod="openstack/neutron-54586d498f-pgsrg" Dec 13 03:30:32 crc kubenswrapper[5070]: I1213 03:30:32.067937 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/93c84134-01b6-453a-95d9-7f2a3e9a4f3d-public-tls-certs\") pod \"neutron-54586d498f-pgsrg\" (UID: \"93c84134-01b6-453a-95d9-7f2a3e9a4f3d\") " pod="openstack/neutron-54586d498f-pgsrg" Dec 13 03:30:32 crc kubenswrapper[5070]: I1213 03:30:32.068013 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hdpsk\" (UniqueName: \"kubernetes.io/projected/93c84134-01b6-453a-95d9-7f2a3e9a4f3d-kube-api-access-hdpsk\") pod \"neutron-54586d498f-pgsrg\" (UID: \"93c84134-01b6-453a-95d9-7f2a3e9a4f3d\") " pod="openstack/neutron-54586d498f-pgsrg" Dec 13 03:30:32 crc kubenswrapper[5070]: I1213 03:30:32.068060 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/93c84134-01b6-453a-95d9-7f2a3e9a4f3d-httpd-config\") pod \"neutron-54586d498f-pgsrg\" (UID: \"93c84134-01b6-453a-95d9-7f2a3e9a4f3d\") " pod="openstack/neutron-54586d498f-pgsrg" Dec 13 03:30:32 crc kubenswrapper[5070]: I1213 03:30:32.068079 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/93c84134-01b6-453a-95d9-7f2a3e9a4f3d-combined-ca-bundle\") pod \"neutron-54586d498f-pgsrg\" (UID: \"93c84134-01b6-453a-95d9-7f2a3e9a4f3d\") " pod="openstack/neutron-54586d498f-pgsrg" Dec 13 03:30:32 crc kubenswrapper[5070]: I1213 03:30:32.068261 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/93c84134-01b6-453a-95d9-7f2a3e9a4f3d-internal-tls-certs\") pod \"neutron-54586d498f-pgsrg\" (UID: \"93c84134-01b6-453a-95d9-7f2a3e9a4f3d\") " pod="openstack/neutron-54586d498f-pgsrg" Dec 13 03:30:32 crc kubenswrapper[5070]: I1213 03:30:32.103392 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Dec 13 03:30:32 crc kubenswrapper[5070]: 
W1213 03:30:32.120739 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0d0b3d1f_385d_47e5_af36_d016c5b9cd1b.slice/crio-b82aad87beb2ce8509712ee678535340f61e195feeaef62cbd555d3430c6095b WatchSource:0}: Error finding container b82aad87beb2ce8509712ee678535340f61e195feeaef62cbd555d3430c6095b: Status 404 returned error can't find the container with id b82aad87beb2ce8509712ee678535340f61e195feeaef62cbd555d3430c6095b Dec 13 03:30:32 crc kubenswrapper[5070]: I1213 03:30:32.170099 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/93c84134-01b6-453a-95d9-7f2a3e9a4f3d-ovndb-tls-certs\") pod \"neutron-54586d498f-pgsrg\" (UID: \"93c84134-01b6-453a-95d9-7f2a3e9a4f3d\") " pod="openstack/neutron-54586d498f-pgsrg" Dec 13 03:30:32 crc kubenswrapper[5070]: I1213 03:30:32.170166 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/93c84134-01b6-453a-95d9-7f2a3e9a4f3d-config\") pod \"neutron-54586d498f-pgsrg\" (UID: \"93c84134-01b6-453a-95d9-7f2a3e9a4f3d\") " pod="openstack/neutron-54586d498f-pgsrg" Dec 13 03:30:32 crc kubenswrapper[5070]: I1213 03:30:32.170225 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/93c84134-01b6-453a-95d9-7f2a3e9a4f3d-public-tls-certs\") pod \"neutron-54586d498f-pgsrg\" (UID: \"93c84134-01b6-453a-95d9-7f2a3e9a4f3d\") " pod="openstack/neutron-54586d498f-pgsrg" Dec 13 03:30:32 crc kubenswrapper[5070]: I1213 03:30:32.170285 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hdpsk\" (UniqueName: \"kubernetes.io/projected/93c84134-01b6-453a-95d9-7f2a3e9a4f3d-kube-api-access-hdpsk\") pod \"neutron-54586d498f-pgsrg\" (UID: \"93c84134-01b6-453a-95d9-7f2a3e9a4f3d\") " pod="openstack/neutron-54586d498f-pgsrg" Dec 13 03:30:32 crc kubenswrapper[5070]: I1213 03:30:32.170339 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/93c84134-01b6-453a-95d9-7f2a3e9a4f3d-httpd-config\") pod \"neutron-54586d498f-pgsrg\" (UID: \"93c84134-01b6-453a-95d9-7f2a3e9a4f3d\") " pod="openstack/neutron-54586d498f-pgsrg" Dec 13 03:30:32 crc kubenswrapper[5070]: I1213 03:30:32.170373 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/93c84134-01b6-453a-95d9-7f2a3e9a4f3d-combined-ca-bundle\") pod \"neutron-54586d498f-pgsrg\" (UID: \"93c84134-01b6-453a-95d9-7f2a3e9a4f3d\") " pod="openstack/neutron-54586d498f-pgsrg" Dec 13 03:30:32 crc kubenswrapper[5070]: I1213 03:30:32.170426 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/93c84134-01b6-453a-95d9-7f2a3e9a4f3d-internal-tls-certs\") pod \"neutron-54586d498f-pgsrg\" (UID: \"93c84134-01b6-453a-95d9-7f2a3e9a4f3d\") " pod="openstack/neutron-54586d498f-pgsrg" Dec 13 03:30:32 crc kubenswrapper[5070]: I1213 03:30:32.177186 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/93c84134-01b6-453a-95d9-7f2a3e9a4f3d-public-tls-certs\") pod \"neutron-54586d498f-pgsrg\" (UID: \"93c84134-01b6-453a-95d9-7f2a3e9a4f3d\") " pod="openstack/neutron-54586d498f-pgsrg" Dec 13 03:30:32 
crc kubenswrapper[5070]: I1213 03:30:32.178151 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/93c84134-01b6-453a-95d9-7f2a3e9a4f3d-httpd-config\") pod \"neutron-54586d498f-pgsrg\" (UID: \"93c84134-01b6-453a-95d9-7f2a3e9a4f3d\") " pod="openstack/neutron-54586d498f-pgsrg" Dec 13 03:30:32 crc kubenswrapper[5070]: I1213 03:30:32.180173 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/93c84134-01b6-453a-95d9-7f2a3e9a4f3d-ovndb-tls-certs\") pod \"neutron-54586d498f-pgsrg\" (UID: \"93c84134-01b6-453a-95d9-7f2a3e9a4f3d\") " pod="openstack/neutron-54586d498f-pgsrg" Dec 13 03:30:32 crc kubenswrapper[5070]: I1213 03:30:32.181145 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="661791e7-2fbe-4f8d-88f2-88418da79df8" path="/var/lib/kubelet/pods/661791e7-2fbe-4f8d-88f2-88418da79df8/volumes" Dec 13 03:30:32 crc kubenswrapper[5070]: I1213 03:30:32.187796 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/93c84134-01b6-453a-95d9-7f2a3e9a4f3d-config\") pod \"neutron-54586d498f-pgsrg\" (UID: \"93c84134-01b6-453a-95d9-7f2a3e9a4f3d\") " pod="openstack/neutron-54586d498f-pgsrg" Dec 13 03:30:32 crc kubenswrapper[5070]: I1213 03:30:32.189925 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/93c84134-01b6-453a-95d9-7f2a3e9a4f3d-combined-ca-bundle\") pod \"neutron-54586d498f-pgsrg\" (UID: \"93c84134-01b6-453a-95d9-7f2a3e9a4f3d\") " pod="openstack/neutron-54586d498f-pgsrg" Dec 13 03:30:32 crc kubenswrapper[5070]: I1213 03:30:32.192156 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/93c84134-01b6-453a-95d9-7f2a3e9a4f3d-internal-tls-certs\") pod \"neutron-54586d498f-pgsrg\" (UID: \"93c84134-01b6-453a-95d9-7f2a3e9a4f3d\") " pod="openstack/neutron-54586d498f-pgsrg" Dec 13 03:30:32 crc kubenswrapper[5070]: I1213 03:30:32.192955 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hdpsk\" (UniqueName: \"kubernetes.io/projected/93c84134-01b6-453a-95d9-7f2a3e9a4f3d-kube-api-access-hdpsk\") pod \"neutron-54586d498f-pgsrg\" (UID: \"93c84134-01b6-453a-95d9-7f2a3e9a4f3d\") " pod="openstack/neutron-54586d498f-pgsrg" Dec 13 03:30:32 crc kubenswrapper[5070]: I1213 03:30:32.381490 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-54586d498f-pgsrg" Dec 13 03:30:32 crc kubenswrapper[5070]: I1213 03:30:32.484418 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8009852b-9899-44f8-95ee-bf5b03dd8fc7","Type":"ContainerStarted","Data":"1e1289301407308da63af061685039f4c109fd1c7ff9600f62cdc7b39d3e19c2"} Dec 13 03:30:32 crc kubenswrapper[5070]: I1213 03:30:32.484847 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 13 03:30:32 crc kubenswrapper[5070]: I1213 03:30:32.485850 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"0d0b3d1f-385d-47e5-af36-d016c5b9cd1b","Type":"ContainerStarted","Data":"b82aad87beb2ce8509712ee678535340f61e195feeaef62cbd555d3430c6095b"} Dec 13 03:30:32 crc kubenswrapper[5070]: I1213 03:30:32.487616 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Dec 13 03:30:32 crc kubenswrapper[5070]: I1213 03:30:32.487611 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f66db59b9-89998" event={"ID":"6e76ff7f-507c-4d7f-baf0-b1967eae5fff","Type":"ContainerStarted","Data":"6085670a963d85e0967bff96dea16f6732e714a003a6b832f2249a3ead6eb11d"} Dec 13 03:30:32 crc kubenswrapper[5070]: I1213 03:30:32.488150 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5f66db59b9-89998" Dec 13 03:30:32 crc kubenswrapper[5070]: I1213 03:30:32.518961 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.204870664 podStartE2EDuration="5.518938044s" podCreationTimestamp="2025-12-13 03:30:27 +0000 UTC" firstStartedPulling="2025-12-13 03:30:28.113134541 +0000 UTC m=+1120.348978087" lastFinishedPulling="2025-12-13 03:30:31.427201921 +0000 UTC m=+1123.663045467" observedRunningTime="2025-12-13 03:30:32.513101264 +0000 UTC m=+1124.748944810" watchObservedRunningTime="2025-12-13 03:30:32.518938044 +0000 UTC m=+1124.754781590" Dec 13 03:30:32 crc kubenswrapper[5070]: I1213 03:30:32.534720 5070 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="661791e7-2fbe-4f8d-88f2-88418da79df8" podUID="0d0b3d1f-385d-47e5-af36-d016c5b9cd1b" Dec 13 03:30:33 crc kubenswrapper[5070]: I1213 03:30:33.037791 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5f66db59b9-89998" podStartSLOduration=4.037774501 podStartE2EDuration="4.037774501s" podCreationTimestamp="2025-12-13 03:30:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 03:30:32.531941039 +0000 UTC m=+1124.767784585" watchObservedRunningTime="2025-12-13 03:30:33.037774501 +0000 UTC m=+1125.273618047" Dec 13 03:30:33 crc kubenswrapper[5070]: I1213 03:30:33.044551 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-54586d498f-pgsrg"] Dec 13 03:30:33 crc kubenswrapper[5070]: W1213 03:30:33.064183 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod93c84134_01b6_453a_95d9_7f2a3e9a4f3d.slice/crio-a348dac3f05cd15ad31d77ec81c6605c6fa24cb6da58c6c69181d256eac13639 WatchSource:0}: Error finding container a348dac3f05cd15ad31d77ec81c6605c6fa24cb6da58c6c69181d256eac13639: Status 404 returned error can't find the container with id a348dac3f05cd15ad31d77ec81c6605c6fa24cb6da58c6c69181d256eac13639 Dec 13 03:30:33 crc kubenswrapper[5070]: I1213 03:30:33.503466 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-54586d498f-pgsrg" event={"ID":"93c84134-01b6-453a-95d9-7f2a3e9a4f3d","Type":"ContainerStarted","Data":"8ae9808776bb17dd86e447b3e10cb689808f7ab73fb8860fa761a4a699f1b46f"} Dec 13 03:30:33 crc kubenswrapper[5070]: I1213 03:30:33.503807 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-54586d498f-pgsrg" event={"ID":"93c84134-01b6-453a-95d9-7f2a3e9a4f3d","Type":"ContainerStarted","Data":"a348dac3f05cd15ad31d77ec81c6605c6fa24cb6da58c6c69181d256eac13639"} Dec 13 03:30:34 crc kubenswrapper[5070]: I1213 03:30:34.515505 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-54586d498f-pgsrg" 
event={"ID":"93c84134-01b6-453a-95d9-7f2a3e9a4f3d","Type":"ContainerStarted","Data":"d4bfbc8ddc84c62616cddd6e3f92affc70d0fdeb40d75596edcffe4552be651c"} Dec 13 03:30:34 crc kubenswrapper[5070]: I1213 03:30:34.515948 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-54586d498f-pgsrg" Dec 13 03:30:34 crc kubenswrapper[5070]: I1213 03:30:34.517857 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-csqcx" event={"ID":"759f7e37-a2f6-4a1b-a220-24397c94b928","Type":"ContainerStarted","Data":"cdc3c34597e656a1c9846b041a60e66893cbca6229fcec6c52c214665c0cb39a"} Dec 13 03:30:34 crc kubenswrapper[5070]: I1213 03:30:34.537863 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-54586d498f-pgsrg" podStartSLOduration=3.5378442420000002 podStartE2EDuration="3.537844242s" podCreationTimestamp="2025-12-13 03:30:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 03:30:34.536261868 +0000 UTC m=+1126.772105414" watchObservedRunningTime="2025-12-13 03:30:34.537844242 +0000 UTC m=+1126.773687788" Dec 13 03:30:34 crc kubenswrapper[5070]: I1213 03:30:34.561226 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-sync-csqcx" podStartSLOduration=13.234813206 podStartE2EDuration="48.56120085s" podCreationTimestamp="2025-12-13 03:29:46 +0000 UTC" firstStartedPulling="2025-12-13 03:29:58.316681157 +0000 UTC m=+1090.552524703" lastFinishedPulling="2025-12-13 03:30:33.643068801 +0000 UTC m=+1125.878912347" observedRunningTime="2025-12-13 03:30:34.553483619 +0000 UTC m=+1126.789327165" watchObservedRunningTime="2025-12-13 03:30:34.56120085 +0000 UTC m=+1126.797044396" Dec 13 03:30:37 crc kubenswrapper[5070]: I1213 03:30:37.541834 5070 generic.go:334] "Generic (PLEG): container finished" podID="759f7e37-a2f6-4a1b-a220-24397c94b928" containerID="cdc3c34597e656a1c9846b041a60e66893cbca6229fcec6c52c214665c0cb39a" exitCode=0 Dec 13 03:30:37 crc kubenswrapper[5070]: I1213 03:30:37.542165 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-csqcx" event={"ID":"759f7e37-a2f6-4a1b-a220-24397c94b928","Type":"ContainerDied","Data":"cdc3c34597e656a1c9846b041a60e66893cbca6229fcec6c52c214665c0cb39a"} Dec 13 03:30:40 crc kubenswrapper[5070]: I1213 03:30:40.011698 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5f66db59b9-89998" Dec 13 03:30:40 crc kubenswrapper[5070]: I1213 03:30:40.077867 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5b6dbdb6f5-6h6vr"] Dec 13 03:30:40 crc kubenswrapper[5070]: I1213 03:30:40.078146 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5b6dbdb6f5-6h6vr" podUID="8b97be4b-4027-4f04-b290-eca32cde927b" containerName="dnsmasq-dns" containerID="cri-o://7a6fef5ee68cc0dcb9d8e0e8fe72913249e8cb020691505d5822f10c8b21696f" gracePeriod=10 Dec 13 03:30:40 crc kubenswrapper[5070]: I1213 03:30:40.574290 5070 generic.go:334] "Generic (PLEG): container finished" podID="8b97be4b-4027-4f04-b290-eca32cde927b" containerID="7a6fef5ee68cc0dcb9d8e0e8fe72913249e8cb020691505d5822f10c8b21696f" exitCode=0 Dec 13 03:30:40 crc kubenswrapper[5070]: I1213 03:30:40.574344 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b6dbdb6f5-6h6vr" 
event={"ID":"8b97be4b-4027-4f04-b290-eca32cde927b","Type":"ContainerDied","Data":"7a6fef5ee68cc0dcb9d8e0e8fe72913249e8cb020691505d5822f10c8b21696f"} Dec 13 03:30:41 crc kubenswrapper[5070]: I1213 03:30:41.403435 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-db-create-jhj27"] Dec 13 03:30:41 crc kubenswrapper[5070]: I1213 03:30:41.405802 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-jhj27" Dec 13 03:30:41 crc kubenswrapper[5070]: I1213 03:30:41.426554 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-jhj27"] Dec 13 03:30:41 crc kubenswrapper[5070]: I1213 03:30:41.461427 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-csqcx" Dec 13 03:30:41 crc kubenswrapper[5070]: I1213 03:30:41.507307 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-db-create-g6ft6"] Dec 13 03:30:41 crc kubenswrapper[5070]: E1213 03:30:41.507725 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="759f7e37-a2f6-4a1b-a220-24397c94b928" containerName="barbican-db-sync" Dec 13 03:30:41 crc kubenswrapper[5070]: I1213 03:30:41.507745 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="759f7e37-a2f6-4a1b-a220-24397c94b928" containerName="barbican-db-sync" Dec 13 03:30:41 crc kubenswrapper[5070]: I1213 03:30:41.507914 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="759f7e37-a2f6-4a1b-a220-24397c94b928" containerName="barbican-db-sync" Dec 13 03:30:41 crc kubenswrapper[5070]: I1213 03:30:41.508422 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-g6ft6" Dec 13 03:30:41 crc kubenswrapper[5070]: I1213 03:30:41.516463 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-g6ft6"] Dec 13 03:30:41 crc kubenswrapper[5070]: I1213 03:30:41.560801 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/759f7e37-a2f6-4a1b-a220-24397c94b928-db-sync-config-data\") pod \"759f7e37-a2f6-4a1b-a220-24397c94b928\" (UID: \"759f7e37-a2f6-4a1b-a220-24397c94b928\") " Dec 13 03:30:41 crc kubenswrapper[5070]: I1213 03:30:41.560979 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rhskf\" (UniqueName: \"kubernetes.io/projected/759f7e37-a2f6-4a1b-a220-24397c94b928-kube-api-access-rhskf\") pod \"759f7e37-a2f6-4a1b-a220-24397c94b928\" (UID: \"759f7e37-a2f6-4a1b-a220-24397c94b928\") " Dec 13 03:30:41 crc kubenswrapper[5070]: I1213 03:30:41.561005 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/759f7e37-a2f6-4a1b-a220-24397c94b928-combined-ca-bundle\") pod \"759f7e37-a2f6-4a1b-a220-24397c94b928\" (UID: \"759f7e37-a2f6-4a1b-a220-24397c94b928\") " Dec 13 03:30:41 crc kubenswrapper[5070]: I1213 03:30:41.561196 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qft44\" (UniqueName: \"kubernetes.io/projected/79694a00-eed5-4e8f-8cff-bf85dced4d4a-kube-api-access-qft44\") pod \"nova-api-db-create-jhj27\" (UID: \"79694a00-eed5-4e8f-8cff-bf85dced4d4a\") " pod="openstack/nova-api-db-create-jhj27" Dec 13 03:30:41 crc kubenswrapper[5070]: I1213 03:30:41.583903 5070 operation_generator.go:803] 
UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/759f7e37-a2f6-4a1b-a220-24397c94b928-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "759f7e37-a2f6-4a1b-a220-24397c94b928" (UID: "759f7e37-a2f6-4a1b-a220-24397c94b928"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:30:41 crc kubenswrapper[5070]: I1213 03:30:41.593540 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/759f7e37-a2f6-4a1b-a220-24397c94b928-kube-api-access-rhskf" (OuterVolumeSpecName: "kube-api-access-rhskf") pod "759f7e37-a2f6-4a1b-a220-24397c94b928" (UID: "759f7e37-a2f6-4a1b-a220-24397c94b928"). InnerVolumeSpecName "kube-api-access-rhskf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:30:41 crc kubenswrapper[5070]: I1213 03:30:41.625103 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-db-create-mp95c"] Dec 13 03:30:41 crc kubenswrapper[5070]: I1213 03:30:41.626698 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-mp95c" Dec 13 03:30:41 crc kubenswrapper[5070]: I1213 03:30:41.630659 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5b6dbdb6f5-6h6vr" Dec 13 03:30:41 crc kubenswrapper[5070]: I1213 03:30:41.643659 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-csqcx" event={"ID":"759f7e37-a2f6-4a1b-a220-24397c94b928","Type":"ContainerDied","Data":"504872aa8301581903db50171caf16ba6f52b377d961bc582510e3869bf4f846"} Dec 13 03:30:41 crc kubenswrapper[5070]: I1213 03:30:41.644030 5070 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="504872aa8301581903db50171caf16ba6f52b377d961bc582510e3869bf4f846" Dec 13 03:30:41 crc kubenswrapper[5070]: I1213 03:30:41.644173 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-csqcx" Dec 13 03:30:41 crc kubenswrapper[5070]: I1213 03:30:41.647214 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/759f7e37-a2f6-4a1b-a220-24397c94b928-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "759f7e37-a2f6-4a1b-a220-24397c94b928" (UID: "759f7e37-a2f6-4a1b-a220-24397c94b928"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:30:41 crc kubenswrapper[5070]: I1213 03:30:41.652856 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-mp95c"] Dec 13 03:30:41 crc kubenswrapper[5070]: I1213 03:30:41.653199 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b6dbdb6f5-6h6vr" event={"ID":"8b97be4b-4027-4f04-b290-eca32cde927b","Type":"ContainerDied","Data":"9f3a2a441614832f69930ca1d4599b9ca06bbb16daf7108961f055943a9168a2"} Dec 13 03:30:41 crc kubenswrapper[5070]: I1213 03:30:41.653238 5070 scope.go:117] "RemoveContainer" containerID="7a6fef5ee68cc0dcb9d8e0e8fe72913249e8cb020691505d5822f10c8b21696f" Dec 13 03:30:41 crc kubenswrapper[5070]: I1213 03:30:41.653358 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5b6dbdb6f5-6h6vr" Dec 13 03:30:41 crc kubenswrapper[5070]: I1213 03:30:41.662662 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zbrdr\" (UniqueName: \"kubernetes.io/projected/2a201584-81d1-424e-97e0-1b76842274aa-kube-api-access-zbrdr\") pod \"nova-cell0-db-create-g6ft6\" (UID: \"2a201584-81d1-424e-97e0-1b76842274aa\") " pod="openstack/nova-cell0-db-create-g6ft6" Dec 13 03:30:41 crc kubenswrapper[5070]: I1213 03:30:41.662857 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qft44\" (UniqueName: \"kubernetes.io/projected/79694a00-eed5-4e8f-8cff-bf85dced4d4a-kube-api-access-qft44\") pod \"nova-api-db-create-jhj27\" (UID: \"79694a00-eed5-4e8f-8cff-bf85dced4d4a\") " pod="openstack/nova-api-db-create-jhj27" Dec 13 03:30:41 crc kubenswrapper[5070]: I1213 03:30:41.662975 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rhskf\" (UniqueName: \"kubernetes.io/projected/759f7e37-a2f6-4a1b-a220-24397c94b928-kube-api-access-rhskf\") on node \"crc\" DevicePath \"\"" Dec 13 03:30:41 crc kubenswrapper[5070]: I1213 03:30:41.663034 5070 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/759f7e37-a2f6-4a1b-a220-24397c94b928-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 03:30:41 crc kubenswrapper[5070]: I1213 03:30:41.663091 5070 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/759f7e37-a2f6-4a1b-a220-24397c94b928-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Dec 13 03:30:41 crc kubenswrapper[5070]: I1213 03:30:41.687703 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qft44\" (UniqueName: \"kubernetes.io/projected/79694a00-eed5-4e8f-8cff-bf85dced4d4a-kube-api-access-qft44\") pod \"nova-api-db-create-jhj27\" (UID: \"79694a00-eed5-4e8f-8cff-bf85dced4d4a\") " pod="openstack/nova-api-db-create-jhj27" Dec 13 03:30:41 crc kubenswrapper[5070]: I1213 03:30:41.705253 5070 scope.go:117] "RemoveContainer" containerID="03fde9ac04e5422bd3245307e9f6d5b5a302a7083e81227bc03af03a4020f056" Dec 13 03:30:41 crc kubenswrapper[5070]: I1213 03:30:41.764388 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8b97be4b-4027-4f04-b290-eca32cde927b-dns-svc\") pod \"8b97be4b-4027-4f04-b290-eca32cde927b\" (UID: \"8b97be4b-4027-4f04-b290-eca32cde927b\") " Dec 13 03:30:41 crc kubenswrapper[5070]: I1213 03:30:41.764692 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7qx45\" (UniqueName: \"kubernetes.io/projected/8b97be4b-4027-4f04-b290-eca32cde927b-kube-api-access-7qx45\") pod \"8b97be4b-4027-4f04-b290-eca32cde927b\" (UID: \"8b97be4b-4027-4f04-b290-eca32cde927b\") " Dec 13 03:30:41 crc kubenswrapper[5070]: I1213 03:30:41.764771 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8b97be4b-4027-4f04-b290-eca32cde927b-config\") pod \"8b97be4b-4027-4f04-b290-eca32cde927b\" (UID: \"8b97be4b-4027-4f04-b290-eca32cde927b\") " Dec 13 03:30:41 crc kubenswrapper[5070]: I1213 03:30:41.764850 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: 
\"kubernetes.io/configmap/8b97be4b-4027-4f04-b290-eca32cde927b-ovsdbserver-sb\") pod \"8b97be4b-4027-4f04-b290-eca32cde927b\" (UID: \"8b97be4b-4027-4f04-b290-eca32cde927b\") " Dec 13 03:30:41 crc kubenswrapper[5070]: I1213 03:30:41.764882 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8b97be4b-4027-4f04-b290-eca32cde927b-ovsdbserver-nb\") pod \"8b97be4b-4027-4f04-b290-eca32cde927b\" (UID: \"8b97be4b-4027-4f04-b290-eca32cde927b\") " Dec 13 03:30:41 crc kubenswrapper[5070]: I1213 03:30:41.765214 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zbrdr\" (UniqueName: \"kubernetes.io/projected/2a201584-81d1-424e-97e0-1b76842274aa-kube-api-access-zbrdr\") pod \"nova-cell0-db-create-g6ft6\" (UID: \"2a201584-81d1-424e-97e0-1b76842274aa\") " pod="openstack/nova-cell0-db-create-g6ft6" Dec 13 03:30:41 crc kubenswrapper[5070]: I1213 03:30:41.765268 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-whddv\" (UniqueName: \"kubernetes.io/projected/092833e5-159b-4c12-8c86-e36d41a1736e-kube-api-access-whddv\") pod \"nova-cell1-db-create-mp95c\" (UID: \"092833e5-159b-4c12-8c86-e36d41a1736e\") " pod="openstack/nova-cell1-db-create-mp95c" Dec 13 03:30:41 crc kubenswrapper[5070]: I1213 03:30:41.783203 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8b97be4b-4027-4f04-b290-eca32cde927b-kube-api-access-7qx45" (OuterVolumeSpecName: "kube-api-access-7qx45") pod "8b97be4b-4027-4f04-b290-eca32cde927b" (UID: "8b97be4b-4027-4f04-b290-eca32cde927b"). InnerVolumeSpecName "kube-api-access-7qx45". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:30:41 crc kubenswrapper[5070]: I1213 03:30:41.784428 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zbrdr\" (UniqueName: \"kubernetes.io/projected/2a201584-81d1-424e-97e0-1b76842274aa-kube-api-access-zbrdr\") pod \"nova-cell0-db-create-g6ft6\" (UID: \"2a201584-81d1-424e-97e0-1b76842274aa\") " pod="openstack/nova-cell0-db-create-g6ft6" Dec 13 03:30:41 crc kubenswrapper[5070]: I1213 03:30:41.819259 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8b97be4b-4027-4f04-b290-eca32cde927b-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "8b97be4b-4027-4f04-b290-eca32cde927b" (UID: "8b97be4b-4027-4f04-b290-eca32cde927b"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:30:41 crc kubenswrapper[5070]: I1213 03:30:41.841916 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8b97be4b-4027-4f04-b290-eca32cde927b-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "8b97be4b-4027-4f04-b290-eca32cde927b" (UID: "8b97be4b-4027-4f04-b290-eca32cde927b"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:30:41 crc kubenswrapper[5070]: I1213 03:30:41.844927 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8b97be4b-4027-4f04-b290-eca32cde927b-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "8b97be4b-4027-4f04-b290-eca32cde927b" (UID: "8b97be4b-4027-4f04-b290-eca32cde927b"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:30:41 crc kubenswrapper[5070]: I1213 03:30:41.851750 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-jhj27" Dec 13 03:30:41 crc kubenswrapper[5070]: I1213 03:30:41.854641 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8b97be4b-4027-4f04-b290-eca32cde927b-config" (OuterVolumeSpecName: "config") pod "8b97be4b-4027-4f04-b290-eca32cde927b" (UID: "8b97be4b-4027-4f04-b290-eca32cde927b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:30:41 crc kubenswrapper[5070]: I1213 03:30:41.866847 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-whddv\" (UniqueName: \"kubernetes.io/projected/092833e5-159b-4c12-8c86-e36d41a1736e-kube-api-access-whddv\") pod \"nova-cell1-db-create-mp95c\" (UID: \"092833e5-159b-4c12-8c86-e36d41a1736e\") " pod="openstack/nova-cell1-db-create-mp95c" Dec 13 03:30:41 crc kubenswrapper[5070]: I1213 03:30:41.867120 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7qx45\" (UniqueName: \"kubernetes.io/projected/8b97be4b-4027-4f04-b290-eca32cde927b-kube-api-access-7qx45\") on node \"crc\" DevicePath \"\"" Dec 13 03:30:41 crc kubenswrapper[5070]: I1213 03:30:41.867179 5070 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8b97be4b-4027-4f04-b290-eca32cde927b-config\") on node \"crc\" DevicePath \"\"" Dec 13 03:30:41 crc kubenswrapper[5070]: I1213 03:30:41.867231 5070 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8b97be4b-4027-4f04-b290-eca32cde927b-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 13 03:30:41 crc kubenswrapper[5070]: I1213 03:30:41.867281 5070 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8b97be4b-4027-4f04-b290-eca32cde927b-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 13 03:30:41 crc kubenswrapper[5070]: I1213 03:30:41.867391 5070 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8b97be4b-4027-4f04-b290-eca32cde927b-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 13 03:30:41 crc kubenswrapper[5070]: I1213 03:30:41.883682 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-whddv\" (UniqueName: \"kubernetes.io/projected/092833e5-159b-4c12-8c86-e36d41a1736e-kube-api-access-whddv\") pod \"nova-cell1-db-create-mp95c\" (UID: \"092833e5-159b-4c12-8c86-e36d41a1736e\") " pod="openstack/nova-cell1-db-create-mp95c" Dec 13 03:30:41 crc kubenswrapper[5070]: I1213 03:30:41.885570 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-g6ft6" Dec 13 03:30:41 crc kubenswrapper[5070]: I1213 03:30:41.953870 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-mp95c" Dec 13 03:30:42 crc kubenswrapper[5070]: I1213 03:30:42.035127 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5b6dbdb6f5-6h6vr"] Dec 13 03:30:42 crc kubenswrapper[5070]: I1213 03:30:42.050042 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5b6dbdb6f5-6h6vr"] Dec 13 03:30:42 crc kubenswrapper[5070]: I1213 03:30:42.188248 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8b97be4b-4027-4f04-b290-eca32cde927b" path="/var/lib/kubelet/pods/8b97be4b-4027-4f04-b290-eca32cde927b/volumes" Dec 13 03:30:42 crc kubenswrapper[5070]: I1213 03:30:42.345231 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-jhj27"] Dec 13 03:30:42 crc kubenswrapper[5070]: I1213 03:30:42.475745 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-g6ft6"] Dec 13 03:30:42 crc kubenswrapper[5070]: W1213 03:30:42.496104 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2a201584_81d1_424e_97e0_1b76842274aa.slice/crio-568dc2ff982fbcf19621445fa735332600beaafa3e09c62302ac2aba1e8b04f3 WatchSource:0}: Error finding container 568dc2ff982fbcf19621445fa735332600beaafa3e09c62302ac2aba1e8b04f3: Status 404 returned error can't find the container with id 568dc2ff982fbcf19621445fa735332600beaafa3e09c62302ac2aba1e8b04f3 Dec 13 03:30:42 crc kubenswrapper[5070]: I1213 03:30:42.623641 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-mp95c"] Dec 13 03:30:42 crc kubenswrapper[5070]: I1213 03:30:42.664307 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"0d0b3d1f-385d-47e5-af36-d016c5b9cd1b","Type":"ContainerStarted","Data":"0635c1893cb787af4251566b47fd505d78244c0c5ccf76442448d2728befe7ff"} Dec 13 03:30:42 crc kubenswrapper[5070]: I1213 03:30:42.681738 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-n7ll5" event={"ID":"64c718c6-de73-4e08-9506-dce5dc9ebffd","Type":"ContainerStarted","Data":"7a29c9bb0683c47b91b2cb9bf3a68c1aa41a48307bb8349ec07d65d45a4594cb"} Dec 13 03:30:42 crc kubenswrapper[5070]: I1213 03:30:42.689258 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-mp95c" event={"ID":"092833e5-159b-4c12-8c86-e36d41a1736e","Type":"ContainerStarted","Data":"976e2a959e326fdee50e511724097e2b3442b0f3400600e24d2c2b8501ba89de"} Dec 13 03:30:42 crc kubenswrapper[5070]: I1213 03:30:42.690980 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-6ff985df9f-bj7dz"] Dec 13 03:30:42 crc kubenswrapper[5070]: E1213 03:30:42.691333 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8b97be4b-4027-4f04-b290-eca32cde927b" containerName="dnsmasq-dns" Dec 13 03:30:42 crc kubenswrapper[5070]: I1213 03:30:42.691351 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="8b97be4b-4027-4f04-b290-eca32cde927b" containerName="dnsmasq-dns" Dec 13 03:30:42 crc kubenswrapper[5070]: E1213 03:30:42.691374 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8b97be4b-4027-4f04-b290-eca32cde927b" containerName="init" Dec 13 03:30:42 crc kubenswrapper[5070]: I1213 03:30:42.691381 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="8b97be4b-4027-4f04-b290-eca32cde927b" containerName="init" Dec 13 03:30:42 crc 
kubenswrapper[5070]: I1213 03:30:42.691570 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="8b97be4b-4027-4f04-b290-eca32cde927b" containerName="dnsmasq-dns" Dec 13 03:30:42 crc kubenswrapper[5070]: I1213 03:30:42.692414 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-6ff985df9f-bj7dz" Dec 13 03:30:42 crc kubenswrapper[5070]: I1213 03:30:42.695524 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-ff62v" Dec 13 03:30:42 crc kubenswrapper[5070]: I1213 03:30:42.695509 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-g6ft6" event={"ID":"2a201584-81d1-424e-97e0-1b76842274aa","Type":"ContainerStarted","Data":"568dc2ff982fbcf19621445fa735332600beaafa3e09c62302ac2aba1e8b04f3"} Dec 13 03:30:42 crc kubenswrapper[5070]: I1213 03:30:42.695735 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data" Dec 13 03:30:42 crc kubenswrapper[5070]: I1213 03:30:42.695767 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Dec 13 03:30:42 crc kubenswrapper[5070]: I1213 03:30:42.701372 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-jhj27" event={"ID":"79694a00-eed5-4e8f-8cff-bf85dced4d4a","Type":"ContainerStarted","Data":"b90514f3336fe99d2d4e99f02eb04f15557dd7025148a6672e3d4429bb1f3e14"} Dec 13 03:30:42 crc kubenswrapper[5070]: I1213 03:30:42.707431 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-8674755ffd-cl8xp"] Dec 13 03:30:42 crc kubenswrapper[5070]: I1213 03:30:42.708966 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-keystone-listener-8674755ffd-cl8xp" Dec 13 03:30:42 crc kubenswrapper[5070]: I1213 03:30:42.712674 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-keystone-listener-config-data" Dec 13 03:30:42 crc kubenswrapper[5070]: I1213 03:30:42.740275 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-6ff985df9f-bj7dz"] Dec 13 03:30:42 crc kubenswrapper[5070]: I1213 03:30:42.750400 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-8674755ffd-cl8xp"] Dec 13 03:30:42 crc kubenswrapper[5070]: I1213 03:30:42.759568 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=2.619698692 podStartE2EDuration="11.759547526s" podCreationTimestamp="2025-12-13 03:30:31 +0000 UTC" firstStartedPulling="2025-12-13 03:30:32.123917329 +0000 UTC m=+1124.359760875" lastFinishedPulling="2025-12-13 03:30:41.263766163 +0000 UTC m=+1133.499609709" observedRunningTime="2025-12-13 03:30:42.706776674 +0000 UTC m=+1134.942620220" watchObservedRunningTime="2025-12-13 03:30:42.759547526 +0000 UTC m=+1134.995391072" Dec 13 03:30:42 crc kubenswrapper[5070]: I1213 03:30:42.784939 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xzj96\" (UniqueName: \"kubernetes.io/projected/fd4d7f22-4b6c-4dd8-ad9d-ba43bd6d666d-kube-api-access-xzj96\") pod \"barbican-keystone-listener-8674755ffd-cl8xp\" (UID: \"fd4d7f22-4b6c-4dd8-ad9d-ba43bd6d666d\") " pod="openstack/barbican-keystone-listener-8674755ffd-cl8xp" Dec 13 03:30:42 crc kubenswrapper[5070]: I1213 03:30:42.785070 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e8acefb2-398a-4cad-aaf8-7a72714b0ac9-config-data\") pod \"barbican-worker-6ff985df9f-bj7dz\" (UID: \"e8acefb2-398a-4cad-aaf8-7a72714b0ac9\") " pod="openstack/barbican-worker-6ff985df9f-bj7dz" Dec 13 03:30:42 crc kubenswrapper[5070]: I1213 03:30:42.785096 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e8acefb2-398a-4cad-aaf8-7a72714b0ac9-logs\") pod \"barbican-worker-6ff985df9f-bj7dz\" (UID: \"e8acefb2-398a-4cad-aaf8-7a72714b0ac9\") " pod="openstack/barbican-worker-6ff985df9f-bj7dz" Dec 13 03:30:42 crc kubenswrapper[5070]: I1213 03:30:42.785119 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fd4d7f22-4b6c-4dd8-ad9d-ba43bd6d666d-logs\") pod \"barbican-keystone-listener-8674755ffd-cl8xp\" (UID: \"fd4d7f22-4b6c-4dd8-ad9d-ba43bd6d666d\") " pod="openstack/barbican-keystone-listener-8674755ffd-cl8xp" Dec 13 03:30:42 crc kubenswrapper[5070]: I1213 03:30:42.785183 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2qghc\" (UniqueName: \"kubernetes.io/projected/e8acefb2-398a-4cad-aaf8-7a72714b0ac9-kube-api-access-2qghc\") pod \"barbican-worker-6ff985df9f-bj7dz\" (UID: \"e8acefb2-398a-4cad-aaf8-7a72714b0ac9\") " pod="openstack/barbican-worker-6ff985df9f-bj7dz" Dec 13 03:30:42 crc kubenswrapper[5070]: I1213 03:30:42.785206 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/e8acefb2-398a-4cad-aaf8-7a72714b0ac9-combined-ca-bundle\") pod \"barbican-worker-6ff985df9f-bj7dz\" (UID: \"e8acefb2-398a-4cad-aaf8-7a72714b0ac9\") " pod="openstack/barbican-worker-6ff985df9f-bj7dz" Dec 13 03:30:42 crc kubenswrapper[5070]: I1213 03:30:42.785225 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e8acefb2-398a-4cad-aaf8-7a72714b0ac9-config-data-custom\") pod \"barbican-worker-6ff985df9f-bj7dz\" (UID: \"e8acefb2-398a-4cad-aaf8-7a72714b0ac9\") " pod="openstack/barbican-worker-6ff985df9f-bj7dz" Dec 13 03:30:42 crc kubenswrapper[5070]: I1213 03:30:42.785244 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fd4d7f22-4b6c-4dd8-ad9d-ba43bd6d666d-config-data\") pod \"barbican-keystone-listener-8674755ffd-cl8xp\" (UID: \"fd4d7f22-4b6c-4dd8-ad9d-ba43bd6d666d\") " pod="openstack/barbican-keystone-listener-8674755ffd-cl8xp" Dec 13 03:30:42 crc kubenswrapper[5070]: I1213 03:30:42.785268 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd4d7f22-4b6c-4dd8-ad9d-ba43bd6d666d-combined-ca-bundle\") pod \"barbican-keystone-listener-8674755ffd-cl8xp\" (UID: \"fd4d7f22-4b6c-4dd8-ad9d-ba43bd6d666d\") " pod="openstack/barbican-keystone-listener-8674755ffd-cl8xp" Dec 13 03:30:42 crc kubenswrapper[5070]: I1213 03:30:42.785288 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/fd4d7f22-4b6c-4dd8-ad9d-ba43bd6d666d-config-data-custom\") pod \"barbican-keystone-listener-8674755ffd-cl8xp\" (UID: \"fd4d7f22-4b6c-4dd8-ad9d-ba43bd6d666d\") " pod="openstack/barbican-keystone-listener-8674755ffd-cl8xp" Dec 13 03:30:42 crc kubenswrapper[5070]: I1213 03:30:42.817178 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-sync-n7ll5" podStartSLOduration=12.863031289 podStartE2EDuration="55.81716189s" podCreationTimestamp="2025-12-13 03:29:47 +0000 UTC" firstStartedPulling="2025-12-13 03:29:58.277086912 +0000 UTC m=+1090.512930458" lastFinishedPulling="2025-12-13 03:30:41.231217513 +0000 UTC m=+1133.467061059" observedRunningTime="2025-12-13 03:30:42.815142805 +0000 UTC m=+1135.050986351" watchObservedRunningTime="2025-12-13 03:30:42.81716189 +0000 UTC m=+1135.053005436" Dec 13 03:30:42 crc kubenswrapper[5070]: I1213 03:30:42.887988 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xzj96\" (UniqueName: \"kubernetes.io/projected/fd4d7f22-4b6c-4dd8-ad9d-ba43bd6d666d-kube-api-access-xzj96\") pod \"barbican-keystone-listener-8674755ffd-cl8xp\" (UID: \"fd4d7f22-4b6c-4dd8-ad9d-ba43bd6d666d\") " pod="openstack/barbican-keystone-listener-8674755ffd-cl8xp" Dec 13 03:30:42 crc kubenswrapper[5070]: I1213 03:30:42.888087 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e8acefb2-398a-4cad-aaf8-7a72714b0ac9-config-data\") pod \"barbican-worker-6ff985df9f-bj7dz\" (UID: \"e8acefb2-398a-4cad-aaf8-7a72714b0ac9\") " pod="openstack/barbican-worker-6ff985df9f-bj7dz" Dec 13 03:30:42 crc kubenswrapper[5070]: I1213 03:30:42.888108 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" 
(UniqueName: \"kubernetes.io/empty-dir/e8acefb2-398a-4cad-aaf8-7a72714b0ac9-logs\") pod \"barbican-worker-6ff985df9f-bj7dz\" (UID: \"e8acefb2-398a-4cad-aaf8-7a72714b0ac9\") " pod="openstack/barbican-worker-6ff985df9f-bj7dz" Dec 13 03:30:42 crc kubenswrapper[5070]: I1213 03:30:42.888131 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fd4d7f22-4b6c-4dd8-ad9d-ba43bd6d666d-logs\") pod \"barbican-keystone-listener-8674755ffd-cl8xp\" (UID: \"fd4d7f22-4b6c-4dd8-ad9d-ba43bd6d666d\") " pod="openstack/barbican-keystone-listener-8674755ffd-cl8xp" Dec 13 03:30:42 crc kubenswrapper[5070]: I1213 03:30:42.888163 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2qghc\" (UniqueName: \"kubernetes.io/projected/e8acefb2-398a-4cad-aaf8-7a72714b0ac9-kube-api-access-2qghc\") pod \"barbican-worker-6ff985df9f-bj7dz\" (UID: \"e8acefb2-398a-4cad-aaf8-7a72714b0ac9\") " pod="openstack/barbican-worker-6ff985df9f-bj7dz" Dec 13 03:30:42 crc kubenswrapper[5070]: I1213 03:30:42.888179 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e8acefb2-398a-4cad-aaf8-7a72714b0ac9-combined-ca-bundle\") pod \"barbican-worker-6ff985df9f-bj7dz\" (UID: \"e8acefb2-398a-4cad-aaf8-7a72714b0ac9\") " pod="openstack/barbican-worker-6ff985df9f-bj7dz" Dec 13 03:30:42 crc kubenswrapper[5070]: I1213 03:30:42.888202 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e8acefb2-398a-4cad-aaf8-7a72714b0ac9-config-data-custom\") pod \"barbican-worker-6ff985df9f-bj7dz\" (UID: \"e8acefb2-398a-4cad-aaf8-7a72714b0ac9\") " pod="openstack/barbican-worker-6ff985df9f-bj7dz" Dec 13 03:30:42 crc kubenswrapper[5070]: I1213 03:30:42.888223 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fd4d7f22-4b6c-4dd8-ad9d-ba43bd6d666d-config-data\") pod \"barbican-keystone-listener-8674755ffd-cl8xp\" (UID: \"fd4d7f22-4b6c-4dd8-ad9d-ba43bd6d666d\") " pod="openstack/barbican-keystone-listener-8674755ffd-cl8xp" Dec 13 03:30:42 crc kubenswrapper[5070]: I1213 03:30:42.888250 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd4d7f22-4b6c-4dd8-ad9d-ba43bd6d666d-combined-ca-bundle\") pod \"barbican-keystone-listener-8674755ffd-cl8xp\" (UID: \"fd4d7f22-4b6c-4dd8-ad9d-ba43bd6d666d\") " pod="openstack/barbican-keystone-listener-8674755ffd-cl8xp" Dec 13 03:30:42 crc kubenswrapper[5070]: I1213 03:30:42.888267 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/fd4d7f22-4b6c-4dd8-ad9d-ba43bd6d666d-config-data-custom\") pod \"barbican-keystone-listener-8674755ffd-cl8xp\" (UID: \"fd4d7f22-4b6c-4dd8-ad9d-ba43bd6d666d\") " pod="openstack/barbican-keystone-listener-8674755ffd-cl8xp" Dec 13 03:30:42 crc kubenswrapper[5070]: I1213 03:30:42.892824 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e8acefb2-398a-4cad-aaf8-7a72714b0ac9-logs\") pod \"barbican-worker-6ff985df9f-bj7dz\" (UID: \"e8acefb2-398a-4cad-aaf8-7a72714b0ac9\") " pod="openstack/barbican-worker-6ff985df9f-bj7dz" Dec 13 03:30:42 crc kubenswrapper[5070]: I1213 03:30:42.893245 5070 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fd4d7f22-4b6c-4dd8-ad9d-ba43bd6d666d-logs\") pod \"barbican-keystone-listener-8674755ffd-cl8xp\" (UID: \"fd4d7f22-4b6c-4dd8-ad9d-ba43bd6d666d\") " pod="openstack/barbican-keystone-listener-8674755ffd-cl8xp" Dec 13 03:30:42 crc kubenswrapper[5070]: I1213 03:30:42.906155 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e8acefb2-398a-4cad-aaf8-7a72714b0ac9-combined-ca-bundle\") pod \"barbican-worker-6ff985df9f-bj7dz\" (UID: \"e8acefb2-398a-4cad-aaf8-7a72714b0ac9\") " pod="openstack/barbican-worker-6ff985df9f-bj7dz" Dec 13 03:30:42 crc kubenswrapper[5070]: I1213 03:30:42.908214 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e8acefb2-398a-4cad-aaf8-7a72714b0ac9-config-data-custom\") pod \"barbican-worker-6ff985df9f-bj7dz\" (UID: \"e8acefb2-398a-4cad-aaf8-7a72714b0ac9\") " pod="openstack/barbican-worker-6ff985df9f-bj7dz" Dec 13 03:30:42 crc kubenswrapper[5070]: I1213 03:30:42.908693 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/fd4d7f22-4b6c-4dd8-ad9d-ba43bd6d666d-config-data-custom\") pod \"barbican-keystone-listener-8674755ffd-cl8xp\" (UID: \"fd4d7f22-4b6c-4dd8-ad9d-ba43bd6d666d\") " pod="openstack/barbican-keystone-listener-8674755ffd-cl8xp" Dec 13 03:30:42 crc kubenswrapper[5070]: I1213 03:30:42.911375 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd4d7f22-4b6c-4dd8-ad9d-ba43bd6d666d-combined-ca-bundle\") pod \"barbican-keystone-listener-8674755ffd-cl8xp\" (UID: \"fd4d7f22-4b6c-4dd8-ad9d-ba43bd6d666d\") " pod="openstack/barbican-keystone-listener-8674755ffd-cl8xp" Dec 13 03:30:42 crc kubenswrapper[5070]: I1213 03:30:42.914605 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fd4d7f22-4b6c-4dd8-ad9d-ba43bd6d666d-config-data\") pod \"barbican-keystone-listener-8674755ffd-cl8xp\" (UID: \"fd4d7f22-4b6c-4dd8-ad9d-ba43bd6d666d\") " pod="openstack/barbican-keystone-listener-8674755ffd-cl8xp" Dec 13 03:30:42 crc kubenswrapper[5070]: I1213 03:30:42.920276 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e8acefb2-398a-4cad-aaf8-7a72714b0ac9-config-data\") pod \"barbican-worker-6ff985df9f-bj7dz\" (UID: \"e8acefb2-398a-4cad-aaf8-7a72714b0ac9\") " pod="openstack/barbican-worker-6ff985df9f-bj7dz" Dec 13 03:30:42 crc kubenswrapper[5070]: I1213 03:30:42.928043 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xzj96\" (UniqueName: \"kubernetes.io/projected/fd4d7f22-4b6c-4dd8-ad9d-ba43bd6d666d-kube-api-access-xzj96\") pod \"barbican-keystone-listener-8674755ffd-cl8xp\" (UID: \"fd4d7f22-4b6c-4dd8-ad9d-ba43bd6d666d\") " pod="openstack/barbican-keystone-listener-8674755ffd-cl8xp" Dec 13 03:30:42 crc kubenswrapper[5070]: I1213 03:30:42.928747 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2qghc\" (UniqueName: \"kubernetes.io/projected/e8acefb2-398a-4cad-aaf8-7a72714b0ac9-kube-api-access-2qghc\") pod \"barbican-worker-6ff985df9f-bj7dz\" (UID: \"e8acefb2-398a-4cad-aaf8-7a72714b0ac9\") " pod="openstack/barbican-worker-6ff985df9f-bj7dz" 
Dec 13 03:30:42 crc kubenswrapper[5070]: I1213 03:30:42.933062 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-869f779d85-6nfwl"] Dec 13 03:30:42 crc kubenswrapper[5070]: I1213 03:30:42.959521 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-869f779d85-6nfwl" Dec 13 03:30:42 crc kubenswrapper[5070]: I1213 03:30:42.961512 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-869f779d85-6nfwl"] Dec 13 03:30:42 crc kubenswrapper[5070]: I1213 03:30:42.990181 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/36d51933-fda4-48e7-a109-f77461bd01b6-config\") pod \"dnsmasq-dns-869f779d85-6nfwl\" (UID: \"36d51933-fda4-48e7-a109-f77461bd01b6\") " pod="openstack/dnsmasq-dns-869f779d85-6nfwl" Dec 13 03:30:42 crc kubenswrapper[5070]: I1213 03:30:42.990278 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/36d51933-fda4-48e7-a109-f77461bd01b6-ovsdbserver-sb\") pod \"dnsmasq-dns-869f779d85-6nfwl\" (UID: \"36d51933-fda4-48e7-a109-f77461bd01b6\") " pod="openstack/dnsmasq-dns-869f779d85-6nfwl" Dec 13 03:30:42 crc kubenswrapper[5070]: I1213 03:30:42.990300 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fjwr5\" (UniqueName: \"kubernetes.io/projected/36d51933-fda4-48e7-a109-f77461bd01b6-kube-api-access-fjwr5\") pod \"dnsmasq-dns-869f779d85-6nfwl\" (UID: \"36d51933-fda4-48e7-a109-f77461bd01b6\") " pod="openstack/dnsmasq-dns-869f779d85-6nfwl" Dec 13 03:30:42 crc kubenswrapper[5070]: I1213 03:30:42.990351 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/36d51933-fda4-48e7-a109-f77461bd01b6-ovsdbserver-nb\") pod \"dnsmasq-dns-869f779d85-6nfwl\" (UID: \"36d51933-fda4-48e7-a109-f77461bd01b6\") " pod="openstack/dnsmasq-dns-869f779d85-6nfwl" Dec 13 03:30:42 crc kubenswrapper[5070]: I1213 03:30:42.990372 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/36d51933-fda4-48e7-a109-f77461bd01b6-dns-svc\") pod \"dnsmasq-dns-869f779d85-6nfwl\" (UID: \"36d51933-fda4-48e7-a109-f77461bd01b6\") " pod="openstack/dnsmasq-dns-869f779d85-6nfwl" Dec 13 03:30:43 crc kubenswrapper[5070]: I1213 03:30:43.019790 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-6ff985df9f-bj7dz" Dec 13 03:30:43 crc kubenswrapper[5070]: I1213 03:30:43.045797 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-8674755ffd-cl8xp" Dec 13 03:30:43 crc kubenswrapper[5070]: I1213 03:30:43.076649 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-55d578b79b-csd56"] Dec 13 03:30:43 crc kubenswrapper[5070]: I1213 03:30:43.078166 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-55d578b79b-csd56" Dec 13 03:30:43 crc kubenswrapper[5070]: I1213 03:30:43.081852 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-api-config-data" Dec 13 03:30:43 crc kubenswrapper[5070]: I1213 03:30:43.091706 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/36d51933-fda4-48e7-a109-f77461bd01b6-ovsdbserver-sb\") pod \"dnsmasq-dns-869f779d85-6nfwl\" (UID: \"36d51933-fda4-48e7-a109-f77461bd01b6\") " pod="openstack/dnsmasq-dns-869f779d85-6nfwl" Dec 13 03:30:43 crc kubenswrapper[5070]: I1213 03:30:43.091761 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fjwr5\" (UniqueName: \"kubernetes.io/projected/36d51933-fda4-48e7-a109-f77461bd01b6-kube-api-access-fjwr5\") pod \"dnsmasq-dns-869f779d85-6nfwl\" (UID: \"36d51933-fda4-48e7-a109-f77461bd01b6\") " pod="openstack/dnsmasq-dns-869f779d85-6nfwl" Dec 13 03:30:43 crc kubenswrapper[5070]: I1213 03:30:43.092062 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/36d51933-fda4-48e7-a109-f77461bd01b6-ovsdbserver-nb\") pod \"dnsmasq-dns-869f779d85-6nfwl\" (UID: \"36d51933-fda4-48e7-a109-f77461bd01b6\") " pod="openstack/dnsmasq-dns-869f779d85-6nfwl" Dec 13 03:30:43 crc kubenswrapper[5070]: I1213 03:30:43.092111 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/36d51933-fda4-48e7-a109-f77461bd01b6-dns-svc\") pod \"dnsmasq-dns-869f779d85-6nfwl\" (UID: \"36d51933-fda4-48e7-a109-f77461bd01b6\") " pod="openstack/dnsmasq-dns-869f779d85-6nfwl" Dec 13 03:30:43 crc kubenswrapper[5070]: I1213 03:30:43.092189 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/36d51933-fda4-48e7-a109-f77461bd01b6-config\") pod \"dnsmasq-dns-869f779d85-6nfwl\" (UID: \"36d51933-fda4-48e7-a109-f77461bd01b6\") " pod="openstack/dnsmasq-dns-869f779d85-6nfwl" Dec 13 03:30:43 crc kubenswrapper[5070]: I1213 03:30:43.093224 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/36d51933-fda4-48e7-a109-f77461bd01b6-config\") pod \"dnsmasq-dns-869f779d85-6nfwl\" (UID: \"36d51933-fda4-48e7-a109-f77461bd01b6\") " pod="openstack/dnsmasq-dns-869f779d85-6nfwl" Dec 13 03:30:43 crc kubenswrapper[5070]: I1213 03:30:43.093919 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/36d51933-fda4-48e7-a109-f77461bd01b6-ovsdbserver-sb\") pod \"dnsmasq-dns-869f779d85-6nfwl\" (UID: \"36d51933-fda4-48e7-a109-f77461bd01b6\") " pod="openstack/dnsmasq-dns-869f779d85-6nfwl" Dec 13 03:30:43 crc kubenswrapper[5070]: I1213 03:30:43.094864 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/36d51933-fda4-48e7-a109-f77461bd01b6-ovsdbserver-nb\") pod \"dnsmasq-dns-869f779d85-6nfwl\" (UID: \"36d51933-fda4-48e7-a109-f77461bd01b6\") " pod="openstack/dnsmasq-dns-869f779d85-6nfwl" Dec 13 03:30:43 crc kubenswrapper[5070]: I1213 03:30:43.096671 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/36d51933-fda4-48e7-a109-f77461bd01b6-dns-svc\") pod 
\"dnsmasq-dns-869f779d85-6nfwl\" (UID: \"36d51933-fda4-48e7-a109-f77461bd01b6\") " pod="openstack/dnsmasq-dns-869f779d85-6nfwl" Dec 13 03:30:43 crc kubenswrapper[5070]: I1213 03:30:43.099420 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-55d578b79b-csd56"] Dec 13 03:30:43 crc kubenswrapper[5070]: I1213 03:30:43.130486 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fjwr5\" (UniqueName: \"kubernetes.io/projected/36d51933-fda4-48e7-a109-f77461bd01b6-kube-api-access-fjwr5\") pod \"dnsmasq-dns-869f779d85-6nfwl\" (UID: \"36d51933-fda4-48e7-a109-f77461bd01b6\") " pod="openstack/dnsmasq-dns-869f779d85-6nfwl" Dec 13 03:30:43 crc kubenswrapper[5070]: I1213 03:30:43.195543 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0eb4c370-92f7-42ac-8362-5e4bd8b3b504-config-data-custom\") pod \"barbican-api-55d578b79b-csd56\" (UID: \"0eb4c370-92f7-42ac-8362-5e4bd8b3b504\") " pod="openstack/barbican-api-55d578b79b-csd56" Dec 13 03:30:43 crc kubenswrapper[5070]: I1213 03:30:43.195840 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0eb4c370-92f7-42ac-8362-5e4bd8b3b504-config-data\") pod \"barbican-api-55d578b79b-csd56\" (UID: \"0eb4c370-92f7-42ac-8362-5e4bd8b3b504\") " pod="openstack/barbican-api-55d578b79b-csd56" Dec 13 03:30:43 crc kubenswrapper[5070]: I1213 03:30:43.195868 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0eb4c370-92f7-42ac-8362-5e4bd8b3b504-logs\") pod \"barbican-api-55d578b79b-csd56\" (UID: \"0eb4c370-92f7-42ac-8362-5e4bd8b3b504\") " pod="openstack/barbican-api-55d578b79b-csd56" Dec 13 03:30:43 crc kubenswrapper[5070]: I1213 03:30:43.195983 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0eb4c370-92f7-42ac-8362-5e4bd8b3b504-combined-ca-bundle\") pod \"barbican-api-55d578b79b-csd56\" (UID: \"0eb4c370-92f7-42ac-8362-5e4bd8b3b504\") " pod="openstack/barbican-api-55d578b79b-csd56" Dec 13 03:30:43 crc kubenswrapper[5070]: I1213 03:30:43.196010 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z25b5\" (UniqueName: \"kubernetes.io/projected/0eb4c370-92f7-42ac-8362-5e4bd8b3b504-kube-api-access-z25b5\") pod \"barbican-api-55d578b79b-csd56\" (UID: \"0eb4c370-92f7-42ac-8362-5e4bd8b3b504\") " pod="openstack/barbican-api-55d578b79b-csd56" Dec 13 03:30:43 crc kubenswrapper[5070]: I1213 03:30:43.297235 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0eb4c370-92f7-42ac-8362-5e4bd8b3b504-config-data-custom\") pod \"barbican-api-55d578b79b-csd56\" (UID: \"0eb4c370-92f7-42ac-8362-5e4bd8b3b504\") " pod="openstack/barbican-api-55d578b79b-csd56" Dec 13 03:30:43 crc kubenswrapper[5070]: I1213 03:30:43.297598 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0eb4c370-92f7-42ac-8362-5e4bd8b3b504-config-data\") pod \"barbican-api-55d578b79b-csd56\" (UID: \"0eb4c370-92f7-42ac-8362-5e4bd8b3b504\") " pod="openstack/barbican-api-55d578b79b-csd56" Dec 13 03:30:43 crc kubenswrapper[5070]: 
I1213 03:30:43.297650 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0eb4c370-92f7-42ac-8362-5e4bd8b3b504-logs\") pod \"barbican-api-55d578b79b-csd56\" (UID: \"0eb4c370-92f7-42ac-8362-5e4bd8b3b504\") " pod="openstack/barbican-api-55d578b79b-csd56" Dec 13 03:30:43 crc kubenswrapper[5070]: I1213 03:30:43.297753 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0eb4c370-92f7-42ac-8362-5e4bd8b3b504-combined-ca-bundle\") pod \"barbican-api-55d578b79b-csd56\" (UID: \"0eb4c370-92f7-42ac-8362-5e4bd8b3b504\") " pod="openstack/barbican-api-55d578b79b-csd56" Dec 13 03:30:43 crc kubenswrapper[5070]: I1213 03:30:43.297783 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z25b5\" (UniqueName: \"kubernetes.io/projected/0eb4c370-92f7-42ac-8362-5e4bd8b3b504-kube-api-access-z25b5\") pod \"barbican-api-55d578b79b-csd56\" (UID: \"0eb4c370-92f7-42ac-8362-5e4bd8b3b504\") " pod="openstack/barbican-api-55d578b79b-csd56" Dec 13 03:30:43 crc kubenswrapper[5070]: I1213 03:30:43.298538 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0eb4c370-92f7-42ac-8362-5e4bd8b3b504-logs\") pod \"barbican-api-55d578b79b-csd56\" (UID: \"0eb4c370-92f7-42ac-8362-5e4bd8b3b504\") " pod="openstack/barbican-api-55d578b79b-csd56" Dec 13 03:30:43 crc kubenswrapper[5070]: I1213 03:30:43.302515 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0eb4c370-92f7-42ac-8362-5e4bd8b3b504-combined-ca-bundle\") pod \"barbican-api-55d578b79b-csd56\" (UID: \"0eb4c370-92f7-42ac-8362-5e4bd8b3b504\") " pod="openstack/barbican-api-55d578b79b-csd56" Dec 13 03:30:43 crc kubenswrapper[5070]: I1213 03:30:43.304058 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-869f779d85-6nfwl" Dec 13 03:30:43 crc kubenswrapper[5070]: I1213 03:30:43.309717 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0eb4c370-92f7-42ac-8362-5e4bd8b3b504-config-data\") pod \"barbican-api-55d578b79b-csd56\" (UID: \"0eb4c370-92f7-42ac-8362-5e4bd8b3b504\") " pod="openstack/barbican-api-55d578b79b-csd56" Dec 13 03:30:43 crc kubenswrapper[5070]: I1213 03:30:43.325057 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z25b5\" (UniqueName: \"kubernetes.io/projected/0eb4c370-92f7-42ac-8362-5e4bd8b3b504-kube-api-access-z25b5\") pod \"barbican-api-55d578b79b-csd56\" (UID: \"0eb4c370-92f7-42ac-8362-5e4bd8b3b504\") " pod="openstack/barbican-api-55d578b79b-csd56" Dec 13 03:30:43 crc kubenswrapper[5070]: I1213 03:30:43.325156 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0eb4c370-92f7-42ac-8362-5e4bd8b3b504-config-data-custom\") pod \"barbican-api-55d578b79b-csd56\" (UID: \"0eb4c370-92f7-42ac-8362-5e4bd8b3b504\") " pod="openstack/barbican-api-55d578b79b-csd56" Dec 13 03:30:43 crc kubenswrapper[5070]: I1213 03:30:43.404868 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-55d578b79b-csd56" Dec 13 03:30:43 crc kubenswrapper[5070]: I1213 03:30:43.559847 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-6ff985df9f-bj7dz"] Dec 13 03:30:43 crc kubenswrapper[5070]: I1213 03:30:43.726327 5070 generic.go:334] "Generic (PLEG): container finished" podID="092833e5-159b-4c12-8c86-e36d41a1736e" containerID="a96fc39999f9ae75d2f09170119f6a6de057b1e4f7cb2c35eb09c60904ad85e0" exitCode=0 Dec 13 03:30:43 crc kubenswrapper[5070]: I1213 03:30:43.726412 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-mp95c" event={"ID":"092833e5-159b-4c12-8c86-e36d41a1736e","Type":"ContainerDied","Data":"a96fc39999f9ae75d2f09170119f6a6de057b1e4f7cb2c35eb09c60904ad85e0"} Dec 13 03:30:43 crc kubenswrapper[5070]: I1213 03:30:43.732154 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-8674755ffd-cl8xp"] Dec 13 03:30:43 crc kubenswrapper[5070]: I1213 03:30:43.735238 5070 generic.go:334] "Generic (PLEG): container finished" podID="2a201584-81d1-424e-97e0-1b76842274aa" containerID="e673b980a21e64ee48353347317ea3c360dcd636d75218beb85a42229cabdc03" exitCode=0 Dec 13 03:30:43 crc kubenswrapper[5070]: I1213 03:30:43.735306 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-g6ft6" event={"ID":"2a201584-81d1-424e-97e0-1b76842274aa","Type":"ContainerDied","Data":"e673b980a21e64ee48353347317ea3c360dcd636d75218beb85a42229cabdc03"} Dec 13 03:30:43 crc kubenswrapper[5070]: I1213 03:30:43.744727 5070 generic.go:334] "Generic (PLEG): container finished" podID="79694a00-eed5-4e8f-8cff-bf85dced4d4a" containerID="5858f0b9a770bd713635250a15e2ac4cd7c62f33e94600bc9a0dca82f946c0f6" exitCode=0 Dec 13 03:30:43 crc kubenswrapper[5070]: I1213 03:30:43.744794 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-jhj27" event={"ID":"79694a00-eed5-4e8f-8cff-bf85dced4d4a","Type":"ContainerDied","Data":"5858f0b9a770bd713635250a15e2ac4cd7c62f33e94600bc9a0dca82f946c0f6"} Dec 13 03:30:43 crc kubenswrapper[5070]: I1213 03:30:43.750551 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-6ff985df9f-bj7dz" event={"ID":"e8acefb2-398a-4cad-aaf8-7a72714b0ac9","Type":"ContainerStarted","Data":"a78889e04b2713331e2fc06ea7d0de9308393e90c2054ee96bec8ac536a8418b"} Dec 13 03:30:43 crc kubenswrapper[5070]: I1213 03:30:43.856185 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-869f779d85-6nfwl"] Dec 13 03:30:44 crc kubenswrapper[5070]: I1213 03:30:44.023062 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-55d578b79b-csd56"] Dec 13 03:30:44 crc kubenswrapper[5070]: W1213 03:30:44.025271 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0eb4c370_92f7_42ac_8362_5e4bd8b3b504.slice/crio-27a12762893af057f1a698a7139f331de0b82f58625e4823e241833073fdeb48 WatchSource:0}: Error finding container 27a12762893af057f1a698a7139f331de0b82f58625e4823e241833073fdeb48: Status 404 returned error can't find the container with id 27a12762893af057f1a698a7139f331de0b82f58625e4823e241833073fdeb48 Dec 13 03:30:44 crc kubenswrapper[5070]: I1213 03:30:44.563493 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 13 03:30:44 crc kubenswrapper[5070]: I1213 03:30:44.564371 5070 kuberuntime_container.go:808] 
"Killing container with a grace period" pod="openstack/ceilometer-0" podUID="8009852b-9899-44f8-95ee-bf5b03dd8fc7" containerName="ceilometer-central-agent" containerID="cri-o://c5293ba7a9fdf3e19f95134512107b263db5b246987b5179b4dc4e9508270a04" gracePeriod=30 Dec 13 03:30:44 crc kubenswrapper[5070]: I1213 03:30:44.564594 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="8009852b-9899-44f8-95ee-bf5b03dd8fc7" containerName="proxy-httpd" containerID="cri-o://1e1289301407308da63af061685039f4c109fd1c7ff9600f62cdc7b39d3e19c2" gracePeriod=30 Dec 13 03:30:44 crc kubenswrapper[5070]: I1213 03:30:44.564638 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="8009852b-9899-44f8-95ee-bf5b03dd8fc7" containerName="sg-core" containerID="cri-o://74fcad3ecac10ec8f2db5f8bda2e9b7ab772d48f73600a5f4cb1c53cb754ce1c" gracePeriod=30 Dec 13 03:30:44 crc kubenswrapper[5070]: I1213 03:30:44.564680 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="8009852b-9899-44f8-95ee-bf5b03dd8fc7" containerName="ceilometer-notification-agent" containerID="cri-o://ae9ae3f7f2f1a70863f552bdc7c330ac3d5ca3a8e39d057bf3436ae3e059b270" gracePeriod=30 Dec 13 03:30:44 crc kubenswrapper[5070]: I1213 03:30:44.572005 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Dec 13 03:30:44 crc kubenswrapper[5070]: I1213 03:30:44.767141 5070 generic.go:334] "Generic (PLEG): container finished" podID="36d51933-fda4-48e7-a109-f77461bd01b6" containerID="66e69d857c3230ac50ae71752587513bcdce67e797ddfd6d648f86614530bf7b" exitCode=0 Dec 13 03:30:44 crc kubenswrapper[5070]: I1213 03:30:44.767228 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-869f779d85-6nfwl" event={"ID":"36d51933-fda4-48e7-a109-f77461bd01b6","Type":"ContainerDied","Data":"66e69d857c3230ac50ae71752587513bcdce67e797ddfd6d648f86614530bf7b"} Dec 13 03:30:44 crc kubenswrapper[5070]: I1213 03:30:44.767260 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-869f779d85-6nfwl" event={"ID":"36d51933-fda4-48e7-a109-f77461bd01b6","Type":"ContainerStarted","Data":"7192485de5f227e5ee94923d2e8992640e72600608cd96d1ef6993a96fc8883d"} Dec 13 03:30:44 crc kubenswrapper[5070]: I1213 03:30:44.779822 5070 generic.go:334] "Generic (PLEG): container finished" podID="8009852b-9899-44f8-95ee-bf5b03dd8fc7" containerID="1e1289301407308da63af061685039f4c109fd1c7ff9600f62cdc7b39d3e19c2" exitCode=0 Dec 13 03:30:44 crc kubenswrapper[5070]: I1213 03:30:44.779861 5070 generic.go:334] "Generic (PLEG): container finished" podID="8009852b-9899-44f8-95ee-bf5b03dd8fc7" containerID="74fcad3ecac10ec8f2db5f8bda2e9b7ab772d48f73600a5f4cb1c53cb754ce1c" exitCode=2 Dec 13 03:30:44 crc kubenswrapper[5070]: I1213 03:30:44.779917 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8009852b-9899-44f8-95ee-bf5b03dd8fc7","Type":"ContainerDied","Data":"1e1289301407308da63af061685039f4c109fd1c7ff9600f62cdc7b39d3e19c2"} Dec 13 03:30:44 crc kubenswrapper[5070]: I1213 03:30:44.779950 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8009852b-9899-44f8-95ee-bf5b03dd8fc7","Type":"ContainerDied","Data":"74fcad3ecac10ec8f2db5f8bda2e9b7ab772d48f73600a5f4cb1c53cb754ce1c"} Dec 13 03:30:44 crc kubenswrapper[5070]: I1213 03:30:44.787168 5070 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openstack/barbican-keystone-listener-8674755ffd-cl8xp" event={"ID":"fd4d7f22-4b6c-4dd8-ad9d-ba43bd6d666d","Type":"ContainerStarted","Data":"b31fc07656acf2ac0aeb785d20bf5bf5fa9c34565eebed9ec62b6b05a5d9d7cd"} Dec 13 03:30:44 crc kubenswrapper[5070]: I1213 03:30:44.801398 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-55d578b79b-csd56" event={"ID":"0eb4c370-92f7-42ac-8362-5e4bd8b3b504","Type":"ContainerStarted","Data":"bbf94aa09aa833afd86624f0cb7cbffce48432a33c75dd9aa4acf582cc28ddf7"} Dec 13 03:30:44 crc kubenswrapper[5070]: I1213 03:30:44.801436 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-55d578b79b-csd56" event={"ID":"0eb4c370-92f7-42ac-8362-5e4bd8b3b504","Type":"ContainerStarted","Data":"658955e06043690e449625229edeb7e8d88533763fee88adbecaa31c566e2e8b"} Dec 13 03:30:44 crc kubenswrapper[5070]: I1213 03:30:44.801468 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-55d578b79b-csd56" event={"ID":"0eb4c370-92f7-42ac-8362-5e4bd8b3b504","Type":"ContainerStarted","Data":"27a12762893af057f1a698a7139f331de0b82f58625e4823e241833073fdeb48"} Dec 13 03:30:44 crc kubenswrapper[5070]: I1213 03:30:44.801724 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-55d578b79b-csd56" Dec 13 03:30:44 crc kubenswrapper[5070]: I1213 03:30:44.801765 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-55d578b79b-csd56" Dec 13 03:30:44 crc kubenswrapper[5070]: I1213 03:30:44.819717 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-55d578b79b-csd56" podStartSLOduration=1.8196950410000001 podStartE2EDuration="1.819695041s" podCreationTimestamp="2025-12-13 03:30:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 03:30:44.819605468 +0000 UTC m=+1137.055449024" watchObservedRunningTime="2025-12-13 03:30:44.819695041 +0000 UTC m=+1137.055538597" Dec 13 03:30:45 crc kubenswrapper[5070]: I1213 03:30:45.564512 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-mp95c" Dec 13 03:30:45 crc kubenswrapper[5070]: I1213 03:30:45.576431 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-db-create-g6ft6" Dec 13 03:30:45 crc kubenswrapper[5070]: I1213 03:30:45.745890 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-whddv\" (UniqueName: \"kubernetes.io/projected/092833e5-159b-4c12-8c86-e36d41a1736e-kube-api-access-whddv\") pod \"092833e5-159b-4c12-8c86-e36d41a1736e\" (UID: \"092833e5-159b-4c12-8c86-e36d41a1736e\") " Dec 13 03:30:45 crc kubenswrapper[5070]: I1213 03:30:45.746030 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zbrdr\" (UniqueName: \"kubernetes.io/projected/2a201584-81d1-424e-97e0-1b76842274aa-kube-api-access-zbrdr\") pod \"2a201584-81d1-424e-97e0-1b76842274aa\" (UID: \"2a201584-81d1-424e-97e0-1b76842274aa\") " Dec 13 03:30:45 crc kubenswrapper[5070]: I1213 03:30:45.753719 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2a201584-81d1-424e-97e0-1b76842274aa-kube-api-access-zbrdr" (OuterVolumeSpecName: "kube-api-access-zbrdr") pod "2a201584-81d1-424e-97e0-1b76842274aa" (UID: "2a201584-81d1-424e-97e0-1b76842274aa"). InnerVolumeSpecName "kube-api-access-zbrdr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:30:45 crc kubenswrapper[5070]: I1213 03:30:45.756245 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/092833e5-159b-4c12-8c86-e36d41a1736e-kube-api-access-whddv" (OuterVolumeSpecName: "kube-api-access-whddv") pod "092833e5-159b-4c12-8c86-e36d41a1736e" (UID: "092833e5-159b-4c12-8c86-e36d41a1736e"). InnerVolumeSpecName "kube-api-access-whddv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:30:45 crc kubenswrapper[5070]: I1213 03:30:45.812063 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-mp95c" event={"ID":"092833e5-159b-4c12-8c86-e36d41a1736e","Type":"ContainerDied","Data":"976e2a959e326fdee50e511724097e2b3442b0f3400600e24d2c2b8501ba89de"} Dec 13 03:30:45 crc kubenswrapper[5070]: I1213 03:30:45.812377 5070 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="976e2a959e326fdee50e511724097e2b3442b0f3400600e24d2c2b8501ba89de" Dec 13 03:30:45 crc kubenswrapper[5070]: I1213 03:30:45.812349 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-mp95c" Dec 13 03:30:45 crc kubenswrapper[5070]: I1213 03:30:45.823738 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-g6ft6" event={"ID":"2a201584-81d1-424e-97e0-1b76842274aa","Type":"ContainerDied","Data":"568dc2ff982fbcf19621445fa735332600beaafa3e09c62302ac2aba1e8b04f3"} Dec 13 03:30:45 crc kubenswrapper[5070]: I1213 03:30:45.823778 5070 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="568dc2ff982fbcf19621445fa735332600beaafa3e09c62302ac2aba1e8b04f3" Dec 13 03:30:45 crc kubenswrapper[5070]: I1213 03:30:45.823846 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-db-create-g6ft6" Dec 13 03:30:45 crc kubenswrapper[5070]: I1213 03:30:45.836083 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-869f779d85-6nfwl" event={"ID":"36d51933-fda4-48e7-a109-f77461bd01b6","Type":"ContainerStarted","Data":"06e964c99b3fd821858762a7911e280833d507f457e211d28901d8d711e81cd8"} Dec 13 03:30:45 crc kubenswrapper[5070]: I1213 03:30:45.837193 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-869f779d85-6nfwl" Dec 13 03:30:45 crc kubenswrapper[5070]: I1213 03:30:45.839874 5070 generic.go:334] "Generic (PLEG): container finished" podID="8009852b-9899-44f8-95ee-bf5b03dd8fc7" containerID="c5293ba7a9fdf3e19f95134512107b263db5b246987b5179b4dc4e9508270a04" exitCode=0 Dec 13 03:30:45 crc kubenswrapper[5070]: I1213 03:30:45.840554 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8009852b-9899-44f8-95ee-bf5b03dd8fc7","Type":"ContainerDied","Data":"c5293ba7a9fdf3e19f95134512107b263db5b246987b5179b4dc4e9508270a04"} Dec 13 03:30:45 crc kubenswrapper[5070]: I1213 03:30:45.849213 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-whddv\" (UniqueName: \"kubernetes.io/projected/092833e5-159b-4c12-8c86-e36d41a1736e-kube-api-access-whddv\") on node \"crc\" DevicePath \"\"" Dec 13 03:30:45 crc kubenswrapper[5070]: I1213 03:30:45.849407 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zbrdr\" (UniqueName: \"kubernetes.io/projected/2a201584-81d1-424e-97e0-1b76842274aa-kube-api-access-zbrdr\") on node \"crc\" DevicePath \"\"" Dec 13 03:30:45 crc kubenswrapper[5070]: I1213 03:30:45.856931 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-869f779d85-6nfwl" podStartSLOduration=3.856915703 podStartE2EDuration="3.856915703s" podCreationTimestamp="2025-12-13 03:30:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 03:30:45.853647184 +0000 UTC m=+1138.089490750" watchObservedRunningTime="2025-12-13 03:30:45.856915703 +0000 UTC m=+1138.092759249" Dec 13 03:30:46 crc kubenswrapper[5070]: I1213 03:30:46.025637 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-jhj27" Dec 13 03:30:46 crc kubenswrapper[5070]: I1213 03:30:46.154563 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qft44\" (UniqueName: \"kubernetes.io/projected/79694a00-eed5-4e8f-8cff-bf85dced4d4a-kube-api-access-qft44\") pod \"79694a00-eed5-4e8f-8cff-bf85dced4d4a\" (UID: \"79694a00-eed5-4e8f-8cff-bf85dced4d4a\") " Dec 13 03:30:46 crc kubenswrapper[5070]: I1213 03:30:46.159926 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/79694a00-eed5-4e8f-8cff-bf85dced4d4a-kube-api-access-qft44" (OuterVolumeSpecName: "kube-api-access-qft44") pod "79694a00-eed5-4e8f-8cff-bf85dced4d4a" (UID: "79694a00-eed5-4e8f-8cff-bf85dced4d4a"). InnerVolumeSpecName "kube-api-access-qft44". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:30:46 crc kubenswrapper[5070]: I1213 03:30:46.256414 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qft44\" (UniqueName: \"kubernetes.io/projected/79694a00-eed5-4e8f-8cff-bf85dced4d4a-kube-api-access-qft44\") on node \"crc\" DevicePath \"\"" Dec 13 03:30:46 crc kubenswrapper[5070]: I1213 03:30:46.348310 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-7d678df9bd-j8bfh"] Dec 13 03:30:46 crc kubenswrapper[5070]: E1213 03:30:46.358906 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="092833e5-159b-4c12-8c86-e36d41a1736e" containerName="mariadb-database-create" Dec 13 03:30:46 crc kubenswrapper[5070]: I1213 03:30:46.358942 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="092833e5-159b-4c12-8c86-e36d41a1736e" containerName="mariadb-database-create" Dec 13 03:30:46 crc kubenswrapper[5070]: E1213 03:30:46.358958 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="79694a00-eed5-4e8f-8cff-bf85dced4d4a" containerName="mariadb-database-create" Dec 13 03:30:46 crc kubenswrapper[5070]: I1213 03:30:46.358964 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="79694a00-eed5-4e8f-8cff-bf85dced4d4a" containerName="mariadb-database-create" Dec 13 03:30:46 crc kubenswrapper[5070]: E1213 03:30:46.358985 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2a201584-81d1-424e-97e0-1b76842274aa" containerName="mariadb-database-create" Dec 13 03:30:46 crc kubenswrapper[5070]: I1213 03:30:46.358992 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="2a201584-81d1-424e-97e0-1b76842274aa" containerName="mariadb-database-create" Dec 13 03:30:46 crc kubenswrapper[5070]: I1213 03:30:46.359148 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="2a201584-81d1-424e-97e0-1b76842274aa" containerName="mariadb-database-create" Dec 13 03:30:46 crc kubenswrapper[5070]: I1213 03:30:46.359166 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="092833e5-159b-4c12-8c86-e36d41a1736e" containerName="mariadb-database-create" Dec 13 03:30:46 crc kubenswrapper[5070]: I1213 03:30:46.359190 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="79694a00-eed5-4e8f-8cff-bf85dced4d4a" containerName="mariadb-database-create" Dec 13 03:30:46 crc kubenswrapper[5070]: I1213 03:30:46.359266 5070 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-5b6dbdb6f5-6h6vr" podUID="8b97be4b-4027-4f04-b290-eca32cde927b" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.132:5353: i/o timeout" Dec 13 03:30:46 crc kubenswrapper[5070]: I1213 03:30:46.360136 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-7d678df9bd-j8bfh" Dec 13 03:30:46 crc kubenswrapper[5070]: I1213 03:30:46.363160 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-internal-svc" Dec 13 03:30:46 crc kubenswrapper[5070]: I1213 03:30:46.363266 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-public-svc" Dec 13 03:30:46 crc kubenswrapper[5070]: I1213 03:30:46.366084 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-7d678df9bd-j8bfh"] Dec 13 03:30:46 crc kubenswrapper[5070]: I1213 03:30:46.560344 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c23e095e-de7a-419f-b6d4-1ca536a3069b-combined-ca-bundle\") pod \"barbican-api-7d678df9bd-j8bfh\" (UID: \"c23e095e-de7a-419f-b6d4-1ca536a3069b\") " pod="openstack/barbican-api-7d678df9bd-j8bfh" Dec 13 03:30:46 crc kubenswrapper[5070]: I1213 03:30:46.560652 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c23e095e-de7a-419f-b6d4-1ca536a3069b-internal-tls-certs\") pod \"barbican-api-7d678df9bd-j8bfh\" (UID: \"c23e095e-de7a-419f-b6d4-1ca536a3069b\") " pod="openstack/barbican-api-7d678df9bd-j8bfh" Dec 13 03:30:46 crc kubenswrapper[5070]: I1213 03:30:46.560673 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c23e095e-de7a-419f-b6d4-1ca536a3069b-config-data-custom\") pod \"barbican-api-7d678df9bd-j8bfh\" (UID: \"c23e095e-de7a-419f-b6d4-1ca536a3069b\") " pod="openstack/barbican-api-7d678df9bd-j8bfh" Dec 13 03:30:46 crc kubenswrapper[5070]: I1213 03:30:46.560694 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c23e095e-de7a-419f-b6d4-1ca536a3069b-logs\") pod \"barbican-api-7d678df9bd-j8bfh\" (UID: \"c23e095e-de7a-419f-b6d4-1ca536a3069b\") " pod="openstack/barbican-api-7d678df9bd-j8bfh" Dec 13 03:30:46 crc kubenswrapper[5070]: I1213 03:30:46.560724 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c658x\" (UniqueName: \"kubernetes.io/projected/c23e095e-de7a-419f-b6d4-1ca536a3069b-kube-api-access-c658x\") pod \"barbican-api-7d678df9bd-j8bfh\" (UID: \"c23e095e-de7a-419f-b6d4-1ca536a3069b\") " pod="openstack/barbican-api-7d678df9bd-j8bfh" Dec 13 03:30:46 crc kubenswrapper[5070]: I1213 03:30:46.560752 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c23e095e-de7a-419f-b6d4-1ca536a3069b-config-data\") pod \"barbican-api-7d678df9bd-j8bfh\" (UID: \"c23e095e-de7a-419f-b6d4-1ca536a3069b\") " pod="openstack/barbican-api-7d678df9bd-j8bfh" Dec 13 03:30:46 crc kubenswrapper[5070]: I1213 03:30:46.560806 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c23e095e-de7a-419f-b6d4-1ca536a3069b-public-tls-certs\") pod \"barbican-api-7d678df9bd-j8bfh\" (UID: \"c23e095e-de7a-419f-b6d4-1ca536a3069b\") " pod="openstack/barbican-api-7d678df9bd-j8bfh" Dec 13 03:30:46 crc kubenswrapper[5070]: I1213 03:30:46.663721 5070 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c23e095e-de7a-419f-b6d4-1ca536a3069b-combined-ca-bundle\") pod \"barbican-api-7d678df9bd-j8bfh\" (UID: \"c23e095e-de7a-419f-b6d4-1ca536a3069b\") " pod="openstack/barbican-api-7d678df9bd-j8bfh" Dec 13 03:30:46 crc kubenswrapper[5070]: I1213 03:30:46.663774 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c23e095e-de7a-419f-b6d4-1ca536a3069b-config-data-custom\") pod \"barbican-api-7d678df9bd-j8bfh\" (UID: \"c23e095e-de7a-419f-b6d4-1ca536a3069b\") " pod="openstack/barbican-api-7d678df9bd-j8bfh" Dec 13 03:30:46 crc kubenswrapper[5070]: I1213 03:30:46.663790 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c23e095e-de7a-419f-b6d4-1ca536a3069b-internal-tls-certs\") pod \"barbican-api-7d678df9bd-j8bfh\" (UID: \"c23e095e-de7a-419f-b6d4-1ca536a3069b\") " pod="openstack/barbican-api-7d678df9bd-j8bfh" Dec 13 03:30:46 crc kubenswrapper[5070]: I1213 03:30:46.663815 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c23e095e-de7a-419f-b6d4-1ca536a3069b-logs\") pod \"barbican-api-7d678df9bd-j8bfh\" (UID: \"c23e095e-de7a-419f-b6d4-1ca536a3069b\") " pod="openstack/barbican-api-7d678df9bd-j8bfh" Dec 13 03:30:46 crc kubenswrapper[5070]: I1213 03:30:46.663852 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c658x\" (UniqueName: \"kubernetes.io/projected/c23e095e-de7a-419f-b6d4-1ca536a3069b-kube-api-access-c658x\") pod \"barbican-api-7d678df9bd-j8bfh\" (UID: \"c23e095e-de7a-419f-b6d4-1ca536a3069b\") " pod="openstack/barbican-api-7d678df9bd-j8bfh" Dec 13 03:30:46 crc kubenswrapper[5070]: I1213 03:30:46.663887 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c23e095e-de7a-419f-b6d4-1ca536a3069b-config-data\") pod \"barbican-api-7d678df9bd-j8bfh\" (UID: \"c23e095e-de7a-419f-b6d4-1ca536a3069b\") " pod="openstack/barbican-api-7d678df9bd-j8bfh" Dec 13 03:30:46 crc kubenswrapper[5070]: I1213 03:30:46.663945 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c23e095e-de7a-419f-b6d4-1ca536a3069b-public-tls-certs\") pod \"barbican-api-7d678df9bd-j8bfh\" (UID: \"c23e095e-de7a-419f-b6d4-1ca536a3069b\") " pod="openstack/barbican-api-7d678df9bd-j8bfh" Dec 13 03:30:46 crc kubenswrapper[5070]: I1213 03:30:46.668167 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c23e095e-de7a-419f-b6d4-1ca536a3069b-logs\") pod \"barbican-api-7d678df9bd-j8bfh\" (UID: \"c23e095e-de7a-419f-b6d4-1ca536a3069b\") " pod="openstack/barbican-api-7d678df9bd-j8bfh" Dec 13 03:30:46 crc kubenswrapper[5070]: I1213 03:30:46.669557 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c23e095e-de7a-419f-b6d4-1ca536a3069b-public-tls-certs\") pod \"barbican-api-7d678df9bd-j8bfh\" (UID: \"c23e095e-de7a-419f-b6d4-1ca536a3069b\") " pod="openstack/barbican-api-7d678df9bd-j8bfh" Dec 13 03:30:46 crc kubenswrapper[5070]: I1213 03:30:46.671625 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" 
(UniqueName: \"kubernetes.io/secret/c23e095e-de7a-419f-b6d4-1ca536a3069b-config-data\") pod \"barbican-api-7d678df9bd-j8bfh\" (UID: \"c23e095e-de7a-419f-b6d4-1ca536a3069b\") " pod="openstack/barbican-api-7d678df9bd-j8bfh" Dec 13 03:30:46 crc kubenswrapper[5070]: I1213 03:30:46.675052 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c23e095e-de7a-419f-b6d4-1ca536a3069b-internal-tls-certs\") pod \"barbican-api-7d678df9bd-j8bfh\" (UID: \"c23e095e-de7a-419f-b6d4-1ca536a3069b\") " pod="openstack/barbican-api-7d678df9bd-j8bfh" Dec 13 03:30:46 crc kubenswrapper[5070]: I1213 03:30:46.675548 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c23e095e-de7a-419f-b6d4-1ca536a3069b-combined-ca-bundle\") pod \"barbican-api-7d678df9bd-j8bfh\" (UID: \"c23e095e-de7a-419f-b6d4-1ca536a3069b\") " pod="openstack/barbican-api-7d678df9bd-j8bfh" Dec 13 03:30:46 crc kubenswrapper[5070]: I1213 03:30:46.676872 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c23e095e-de7a-419f-b6d4-1ca536a3069b-config-data-custom\") pod \"barbican-api-7d678df9bd-j8bfh\" (UID: \"c23e095e-de7a-419f-b6d4-1ca536a3069b\") " pod="openstack/barbican-api-7d678df9bd-j8bfh" Dec 13 03:30:46 crc kubenswrapper[5070]: I1213 03:30:46.689274 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c658x\" (UniqueName: \"kubernetes.io/projected/c23e095e-de7a-419f-b6d4-1ca536a3069b-kube-api-access-c658x\") pod \"barbican-api-7d678df9bd-j8bfh\" (UID: \"c23e095e-de7a-419f-b6d4-1ca536a3069b\") " pod="openstack/barbican-api-7d678df9bd-j8bfh" Dec 13 03:30:46 crc kubenswrapper[5070]: I1213 03:30:46.729839 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 13 03:30:46 crc kubenswrapper[5070]: I1213 03:30:46.766930 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-7d678df9bd-j8bfh" Dec 13 03:30:46 crc kubenswrapper[5070]: I1213 03:30:46.860062 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-6ff985df9f-bj7dz" event={"ID":"e8acefb2-398a-4cad-aaf8-7a72714b0ac9","Type":"ContainerStarted","Data":"9b7fd42586e7fb58dce2d821789a29e77cbbb062f39ac2f7f8cba258c3b28d95"} Dec 13 03:30:46 crc kubenswrapper[5070]: I1213 03:30:46.860111 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-6ff985df9f-bj7dz" event={"ID":"e8acefb2-398a-4cad-aaf8-7a72714b0ac9","Type":"ContainerStarted","Data":"9b74666e95b7c419d31cbe0bbd9f3152c6004ad3e67e1d69e0f31ef9ed13f102"} Dec 13 03:30:46 crc kubenswrapper[5070]: I1213 03:30:46.867527 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8009852b-9899-44f8-95ee-bf5b03dd8fc7-run-httpd\") pod \"8009852b-9899-44f8-95ee-bf5b03dd8fc7\" (UID: \"8009852b-9899-44f8-95ee-bf5b03dd8fc7\") " Dec 13 03:30:46 crc kubenswrapper[5070]: I1213 03:30:46.867639 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8009852b-9899-44f8-95ee-bf5b03dd8fc7-sg-core-conf-yaml\") pod \"8009852b-9899-44f8-95ee-bf5b03dd8fc7\" (UID: \"8009852b-9899-44f8-95ee-bf5b03dd8fc7\") " Dec 13 03:30:46 crc kubenswrapper[5070]: I1213 03:30:46.867722 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8009852b-9899-44f8-95ee-bf5b03dd8fc7-log-httpd\") pod \"8009852b-9899-44f8-95ee-bf5b03dd8fc7\" (UID: \"8009852b-9899-44f8-95ee-bf5b03dd8fc7\") " Dec 13 03:30:46 crc kubenswrapper[5070]: I1213 03:30:46.867820 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fvdh8\" (UniqueName: \"kubernetes.io/projected/8009852b-9899-44f8-95ee-bf5b03dd8fc7-kube-api-access-fvdh8\") pod \"8009852b-9899-44f8-95ee-bf5b03dd8fc7\" (UID: \"8009852b-9899-44f8-95ee-bf5b03dd8fc7\") " Dec 13 03:30:46 crc kubenswrapper[5070]: I1213 03:30:46.867890 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8009852b-9899-44f8-95ee-bf5b03dd8fc7-combined-ca-bundle\") pod \"8009852b-9899-44f8-95ee-bf5b03dd8fc7\" (UID: \"8009852b-9899-44f8-95ee-bf5b03dd8fc7\") " Dec 13 03:30:46 crc kubenswrapper[5070]: I1213 03:30:46.867959 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8009852b-9899-44f8-95ee-bf5b03dd8fc7-config-data\") pod \"8009852b-9899-44f8-95ee-bf5b03dd8fc7\" (UID: \"8009852b-9899-44f8-95ee-bf5b03dd8fc7\") " Dec 13 03:30:46 crc kubenswrapper[5070]: I1213 03:30:46.868008 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8009852b-9899-44f8-95ee-bf5b03dd8fc7-scripts\") pod \"8009852b-9899-44f8-95ee-bf5b03dd8fc7\" (UID: \"8009852b-9899-44f8-95ee-bf5b03dd8fc7\") " Dec 13 03:30:46 crc kubenswrapper[5070]: I1213 03:30:46.869865 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8009852b-9899-44f8-95ee-bf5b03dd8fc7-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "8009852b-9899-44f8-95ee-bf5b03dd8fc7" (UID: "8009852b-9899-44f8-95ee-bf5b03dd8fc7"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 03:30:46 crc kubenswrapper[5070]: I1213 03:30:46.870096 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8009852b-9899-44f8-95ee-bf5b03dd8fc7-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "8009852b-9899-44f8-95ee-bf5b03dd8fc7" (UID: "8009852b-9899-44f8-95ee-bf5b03dd8fc7"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 03:30:46 crc kubenswrapper[5070]: I1213 03:30:46.873010 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8009852b-9899-44f8-95ee-bf5b03dd8fc7-scripts" (OuterVolumeSpecName: "scripts") pod "8009852b-9899-44f8-95ee-bf5b03dd8fc7" (UID: "8009852b-9899-44f8-95ee-bf5b03dd8fc7"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:30:46 crc kubenswrapper[5070]: I1213 03:30:46.875984 5070 generic.go:334] "Generic (PLEG): container finished" podID="8009852b-9899-44f8-95ee-bf5b03dd8fc7" containerID="ae9ae3f7f2f1a70863f552bdc7c330ac3d5ca3a8e39d057bf3436ae3e059b270" exitCode=0 Dec 13 03:30:46 crc kubenswrapper[5070]: I1213 03:30:46.876109 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8009852b-9899-44f8-95ee-bf5b03dd8fc7","Type":"ContainerDied","Data":"ae9ae3f7f2f1a70863f552bdc7c330ac3d5ca3a8e39d057bf3436ae3e059b270"} Dec 13 03:30:46 crc kubenswrapper[5070]: I1213 03:30:46.876149 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8009852b-9899-44f8-95ee-bf5b03dd8fc7","Type":"ContainerDied","Data":"a0c0ea59738082c2f3688da8eb19770a130f6ee1ecc70bc1c21247e2aee87b6d"} Dec 13 03:30:46 crc kubenswrapper[5070]: I1213 03:30:46.876175 5070 scope.go:117] "RemoveContainer" containerID="1e1289301407308da63af061685039f4c109fd1c7ff9600f62cdc7b39d3e19c2" Dec 13 03:30:46 crc kubenswrapper[5070]: I1213 03:30:46.876397 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 13 03:30:46 crc kubenswrapper[5070]: I1213 03:30:46.890569 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8009852b-9899-44f8-95ee-bf5b03dd8fc7-kube-api-access-fvdh8" (OuterVolumeSpecName: "kube-api-access-fvdh8") pod "8009852b-9899-44f8-95ee-bf5b03dd8fc7" (UID: "8009852b-9899-44f8-95ee-bf5b03dd8fc7"). InnerVolumeSpecName "kube-api-access-fvdh8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:30:46 crc kubenswrapper[5070]: I1213 03:30:46.893388 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-worker-6ff985df9f-bj7dz" podStartSLOduration=2.45813148 podStartE2EDuration="4.893368565s" podCreationTimestamp="2025-12-13 03:30:42 +0000 UTC" firstStartedPulling="2025-12-13 03:30:43.585566277 +0000 UTC m=+1135.821409823" lastFinishedPulling="2025-12-13 03:30:46.020803362 +0000 UTC m=+1138.256646908" observedRunningTime="2025-12-13 03:30:46.885218632 +0000 UTC m=+1139.121062168" watchObservedRunningTime="2025-12-13 03:30:46.893368565 +0000 UTC m=+1139.129212111" Dec 13 03:30:46 crc kubenswrapper[5070]: I1213 03:30:46.894994 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-8674755ffd-cl8xp" event={"ID":"fd4d7f22-4b6c-4dd8-ad9d-ba43bd6d666d","Type":"ContainerStarted","Data":"641b8ee30b027d9967615c2ba325a6b68973ede51b9edcaeec2d540e57f6a4f5"} Dec 13 03:30:46 crc kubenswrapper[5070]: I1213 03:30:46.895039 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-8674755ffd-cl8xp" event={"ID":"fd4d7f22-4b6c-4dd8-ad9d-ba43bd6d666d","Type":"ContainerStarted","Data":"86b528d94600239bfb78946c2cd521c5ad041c5f7822f5f385133dfad5f47cb1"} Dec 13 03:30:46 crc kubenswrapper[5070]: I1213 03:30:46.909700 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-jhj27" event={"ID":"79694a00-eed5-4e8f-8cff-bf85dced4d4a","Type":"ContainerDied","Data":"b90514f3336fe99d2d4e99f02eb04f15557dd7025148a6672e3d4429bb1f3e14"} Dec 13 03:30:46 crc kubenswrapper[5070]: I1213 03:30:46.909748 5070 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b90514f3336fe99d2d4e99f02eb04f15557dd7025148a6672e3d4429bb1f3e14" Dec 13 03:30:46 crc kubenswrapper[5070]: I1213 03:30:46.909929 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-jhj27" Dec 13 03:30:46 crc kubenswrapper[5070]: I1213 03:30:46.927023 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8009852b-9899-44f8-95ee-bf5b03dd8fc7-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "8009852b-9899-44f8-95ee-bf5b03dd8fc7" (UID: "8009852b-9899-44f8-95ee-bf5b03dd8fc7"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:30:46 crc kubenswrapper[5070]: I1213 03:30:46.942882 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-8674755ffd-cl8xp" podStartSLOduration=2.59016826 podStartE2EDuration="4.942858398s" podCreationTimestamp="2025-12-13 03:30:42 +0000 UTC" firstStartedPulling="2025-12-13 03:30:43.73459078 +0000 UTC m=+1135.970434326" lastFinishedPulling="2025-12-13 03:30:46.087280918 +0000 UTC m=+1138.323124464" observedRunningTime="2025-12-13 03:30:46.919211811 +0000 UTC m=+1139.155055387" watchObservedRunningTime="2025-12-13 03:30:46.942858398 +0000 UTC m=+1139.178701944" Dec 13 03:30:46 crc kubenswrapper[5070]: I1213 03:30:46.961644 5070 scope.go:117] "RemoveContainer" containerID="74fcad3ecac10ec8f2db5f8bda2e9b7ab772d48f73600a5f4cb1c53cb754ce1c" Dec 13 03:30:46 crc kubenswrapper[5070]: I1213 03:30:46.973616 5070 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8009852b-9899-44f8-95ee-bf5b03dd8fc7-scripts\") on node \"crc\" DevicePath \"\"" Dec 13 03:30:46 crc kubenswrapper[5070]: I1213 03:30:46.973642 5070 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8009852b-9899-44f8-95ee-bf5b03dd8fc7-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 13 03:30:46 crc kubenswrapper[5070]: I1213 03:30:46.973653 5070 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8009852b-9899-44f8-95ee-bf5b03dd8fc7-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 13 03:30:46 crc kubenswrapper[5070]: I1213 03:30:46.973662 5070 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8009852b-9899-44f8-95ee-bf5b03dd8fc7-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 13 03:30:46 crc kubenswrapper[5070]: I1213 03:30:46.973670 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fvdh8\" (UniqueName: \"kubernetes.io/projected/8009852b-9899-44f8-95ee-bf5b03dd8fc7-kube-api-access-fvdh8\") on node \"crc\" DevicePath \"\"" Dec 13 03:30:47 crc kubenswrapper[5070]: I1213 03:30:47.012596 5070 scope.go:117] "RemoveContainer" containerID="ae9ae3f7f2f1a70863f552bdc7c330ac3d5ca3a8e39d057bf3436ae3e059b270" Dec 13 03:30:47 crc kubenswrapper[5070]: I1213 03:30:47.033189 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8009852b-9899-44f8-95ee-bf5b03dd8fc7-config-data" (OuterVolumeSpecName: "config-data") pod "8009852b-9899-44f8-95ee-bf5b03dd8fc7" (UID: "8009852b-9899-44f8-95ee-bf5b03dd8fc7"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:30:47 crc kubenswrapper[5070]: I1213 03:30:47.035767 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8009852b-9899-44f8-95ee-bf5b03dd8fc7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8009852b-9899-44f8-95ee-bf5b03dd8fc7" (UID: "8009852b-9899-44f8-95ee-bf5b03dd8fc7"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:30:47 crc kubenswrapper[5070]: I1213 03:30:47.040611 5070 scope.go:117] "RemoveContainer" containerID="c5293ba7a9fdf3e19f95134512107b263db5b246987b5179b4dc4e9508270a04" Dec 13 03:30:47 crc kubenswrapper[5070]: I1213 03:30:47.074435 5070 scope.go:117] "RemoveContainer" containerID="1e1289301407308da63af061685039f4c109fd1c7ff9600f62cdc7b39d3e19c2" Dec 13 03:30:47 crc kubenswrapper[5070]: E1213 03:30:47.074936 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1e1289301407308da63af061685039f4c109fd1c7ff9600f62cdc7b39d3e19c2\": container with ID starting with 1e1289301407308da63af061685039f4c109fd1c7ff9600f62cdc7b39d3e19c2 not found: ID does not exist" containerID="1e1289301407308da63af061685039f4c109fd1c7ff9600f62cdc7b39d3e19c2" Dec 13 03:30:47 crc kubenswrapper[5070]: I1213 03:30:47.074967 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1e1289301407308da63af061685039f4c109fd1c7ff9600f62cdc7b39d3e19c2"} err="failed to get container status \"1e1289301407308da63af061685039f4c109fd1c7ff9600f62cdc7b39d3e19c2\": rpc error: code = NotFound desc = could not find container \"1e1289301407308da63af061685039f4c109fd1c7ff9600f62cdc7b39d3e19c2\": container with ID starting with 1e1289301407308da63af061685039f4c109fd1c7ff9600f62cdc7b39d3e19c2 not found: ID does not exist" Dec 13 03:30:47 crc kubenswrapper[5070]: I1213 03:30:47.074994 5070 scope.go:117] "RemoveContainer" containerID="74fcad3ecac10ec8f2db5f8bda2e9b7ab772d48f73600a5f4cb1c53cb754ce1c" Dec 13 03:30:47 crc kubenswrapper[5070]: I1213 03:30:47.075012 5070 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8009852b-9899-44f8-95ee-bf5b03dd8fc7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 03:30:47 crc kubenswrapper[5070]: I1213 03:30:47.075038 5070 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8009852b-9899-44f8-95ee-bf5b03dd8fc7-config-data\") on node \"crc\" DevicePath \"\"" Dec 13 03:30:47 crc kubenswrapper[5070]: E1213 03:30:47.076356 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"74fcad3ecac10ec8f2db5f8bda2e9b7ab772d48f73600a5f4cb1c53cb754ce1c\": container with ID starting with 74fcad3ecac10ec8f2db5f8bda2e9b7ab772d48f73600a5f4cb1c53cb754ce1c not found: ID does not exist" containerID="74fcad3ecac10ec8f2db5f8bda2e9b7ab772d48f73600a5f4cb1c53cb754ce1c" Dec 13 03:30:47 crc kubenswrapper[5070]: I1213 03:30:47.076386 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"74fcad3ecac10ec8f2db5f8bda2e9b7ab772d48f73600a5f4cb1c53cb754ce1c"} err="failed to get container status \"74fcad3ecac10ec8f2db5f8bda2e9b7ab772d48f73600a5f4cb1c53cb754ce1c\": rpc error: code = NotFound desc = could not find container \"74fcad3ecac10ec8f2db5f8bda2e9b7ab772d48f73600a5f4cb1c53cb754ce1c\": container with ID starting with 74fcad3ecac10ec8f2db5f8bda2e9b7ab772d48f73600a5f4cb1c53cb754ce1c not found: ID does not exist" Dec 13 03:30:47 crc kubenswrapper[5070]: I1213 03:30:47.076427 5070 scope.go:117] "RemoveContainer" containerID="ae9ae3f7f2f1a70863f552bdc7c330ac3d5ca3a8e39d057bf3436ae3e059b270" Dec 13 03:30:47 crc kubenswrapper[5070]: E1213 03:30:47.076989 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc 
error: code = NotFound desc = could not find container \"ae9ae3f7f2f1a70863f552bdc7c330ac3d5ca3a8e39d057bf3436ae3e059b270\": container with ID starting with ae9ae3f7f2f1a70863f552bdc7c330ac3d5ca3a8e39d057bf3436ae3e059b270 not found: ID does not exist" containerID="ae9ae3f7f2f1a70863f552bdc7c330ac3d5ca3a8e39d057bf3436ae3e059b270" Dec 13 03:30:47 crc kubenswrapper[5070]: I1213 03:30:47.077012 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ae9ae3f7f2f1a70863f552bdc7c330ac3d5ca3a8e39d057bf3436ae3e059b270"} err="failed to get container status \"ae9ae3f7f2f1a70863f552bdc7c330ac3d5ca3a8e39d057bf3436ae3e059b270\": rpc error: code = NotFound desc = could not find container \"ae9ae3f7f2f1a70863f552bdc7c330ac3d5ca3a8e39d057bf3436ae3e059b270\": container with ID starting with ae9ae3f7f2f1a70863f552bdc7c330ac3d5ca3a8e39d057bf3436ae3e059b270 not found: ID does not exist" Dec 13 03:30:47 crc kubenswrapper[5070]: I1213 03:30:47.077029 5070 scope.go:117] "RemoveContainer" containerID="c5293ba7a9fdf3e19f95134512107b263db5b246987b5179b4dc4e9508270a04" Dec 13 03:30:47 crc kubenswrapper[5070]: E1213 03:30:47.077704 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c5293ba7a9fdf3e19f95134512107b263db5b246987b5179b4dc4e9508270a04\": container with ID starting with c5293ba7a9fdf3e19f95134512107b263db5b246987b5179b4dc4e9508270a04 not found: ID does not exist" containerID="c5293ba7a9fdf3e19f95134512107b263db5b246987b5179b4dc4e9508270a04" Dec 13 03:30:47 crc kubenswrapper[5070]: I1213 03:30:47.077730 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c5293ba7a9fdf3e19f95134512107b263db5b246987b5179b4dc4e9508270a04"} err="failed to get container status \"c5293ba7a9fdf3e19f95134512107b263db5b246987b5179b4dc4e9508270a04\": rpc error: code = NotFound desc = could not find container \"c5293ba7a9fdf3e19f95134512107b263db5b246987b5179b4dc4e9508270a04\": container with ID starting with c5293ba7a9fdf3e19f95134512107b263db5b246987b5179b4dc4e9508270a04 not found: ID does not exist" Dec 13 03:30:47 crc kubenswrapper[5070]: I1213 03:30:47.252034 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 13 03:30:47 crc kubenswrapper[5070]: I1213 03:30:47.262846 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 13 03:30:47 crc kubenswrapper[5070]: I1213 03:30:47.285046 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 13 03:30:47 crc kubenswrapper[5070]: E1213 03:30:47.285433 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8009852b-9899-44f8-95ee-bf5b03dd8fc7" containerName="ceilometer-central-agent" Dec 13 03:30:47 crc kubenswrapper[5070]: I1213 03:30:47.285476 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="8009852b-9899-44f8-95ee-bf5b03dd8fc7" containerName="ceilometer-central-agent" Dec 13 03:30:47 crc kubenswrapper[5070]: E1213 03:30:47.285488 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8009852b-9899-44f8-95ee-bf5b03dd8fc7" containerName="proxy-httpd" Dec 13 03:30:47 crc kubenswrapper[5070]: I1213 03:30:47.285494 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="8009852b-9899-44f8-95ee-bf5b03dd8fc7" containerName="proxy-httpd" Dec 13 03:30:47 crc kubenswrapper[5070]: E1213 03:30:47.285520 5070 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="8009852b-9899-44f8-95ee-bf5b03dd8fc7" containerName="ceilometer-notification-agent" Dec 13 03:30:47 crc kubenswrapper[5070]: I1213 03:30:47.285526 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="8009852b-9899-44f8-95ee-bf5b03dd8fc7" containerName="ceilometer-notification-agent" Dec 13 03:30:47 crc kubenswrapper[5070]: E1213 03:30:47.285542 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8009852b-9899-44f8-95ee-bf5b03dd8fc7" containerName="sg-core" Dec 13 03:30:47 crc kubenswrapper[5070]: I1213 03:30:47.285549 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="8009852b-9899-44f8-95ee-bf5b03dd8fc7" containerName="sg-core" Dec 13 03:30:47 crc kubenswrapper[5070]: I1213 03:30:47.285723 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="8009852b-9899-44f8-95ee-bf5b03dd8fc7" containerName="sg-core" Dec 13 03:30:47 crc kubenswrapper[5070]: I1213 03:30:47.285748 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="8009852b-9899-44f8-95ee-bf5b03dd8fc7" containerName="proxy-httpd" Dec 13 03:30:47 crc kubenswrapper[5070]: I1213 03:30:47.285766 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="8009852b-9899-44f8-95ee-bf5b03dd8fc7" containerName="ceilometer-notification-agent" Dec 13 03:30:47 crc kubenswrapper[5070]: I1213 03:30:47.285778 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="8009852b-9899-44f8-95ee-bf5b03dd8fc7" containerName="ceilometer-central-agent" Dec 13 03:30:47 crc kubenswrapper[5070]: I1213 03:30:47.287332 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 13 03:30:47 crc kubenswrapper[5070]: I1213 03:30:47.293770 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 13 03:30:47 crc kubenswrapper[5070]: I1213 03:30:47.293942 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 13 03:30:47 crc kubenswrapper[5070]: I1213 03:30:47.331295 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-7d678df9bd-j8bfh"] Dec 13 03:30:47 crc kubenswrapper[5070]: I1213 03:30:47.356153 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 13 03:30:47 crc kubenswrapper[5070]: I1213 03:30:47.483182 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4dad639a-f1b7-4430-b213-78603c9d16aa-run-httpd\") pod \"ceilometer-0\" (UID: \"4dad639a-f1b7-4430-b213-78603c9d16aa\") " pod="openstack/ceilometer-0" Dec 13 03:30:47 crc kubenswrapper[5070]: I1213 03:30:47.483251 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4dad639a-f1b7-4430-b213-78603c9d16aa-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"4dad639a-f1b7-4430-b213-78603c9d16aa\") " pod="openstack/ceilometer-0" Dec 13 03:30:47 crc kubenswrapper[5070]: I1213 03:30:47.483295 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4dad639a-f1b7-4430-b213-78603c9d16aa-log-httpd\") pod \"ceilometer-0\" (UID: \"4dad639a-f1b7-4430-b213-78603c9d16aa\") " pod="openstack/ceilometer-0" Dec 13 03:30:47 crc kubenswrapper[5070]: I1213 03:30:47.483336 5070 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zghr9\" (UniqueName: \"kubernetes.io/projected/4dad639a-f1b7-4430-b213-78603c9d16aa-kube-api-access-zghr9\") pod \"ceilometer-0\" (UID: \"4dad639a-f1b7-4430-b213-78603c9d16aa\") " pod="openstack/ceilometer-0" Dec 13 03:30:47 crc kubenswrapper[5070]: I1213 03:30:47.483381 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4dad639a-f1b7-4430-b213-78603c9d16aa-scripts\") pod \"ceilometer-0\" (UID: \"4dad639a-f1b7-4430-b213-78603c9d16aa\") " pod="openstack/ceilometer-0" Dec 13 03:30:47 crc kubenswrapper[5070]: I1213 03:30:47.483462 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/4dad639a-f1b7-4430-b213-78603c9d16aa-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"4dad639a-f1b7-4430-b213-78603c9d16aa\") " pod="openstack/ceilometer-0" Dec 13 03:30:47 crc kubenswrapper[5070]: I1213 03:30:47.483488 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4dad639a-f1b7-4430-b213-78603c9d16aa-config-data\") pod \"ceilometer-0\" (UID: \"4dad639a-f1b7-4430-b213-78603c9d16aa\") " pod="openstack/ceilometer-0" Dec 13 03:30:47 crc kubenswrapper[5070]: I1213 03:30:47.584666 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4dad639a-f1b7-4430-b213-78603c9d16aa-scripts\") pod \"ceilometer-0\" (UID: \"4dad639a-f1b7-4430-b213-78603c9d16aa\") " pod="openstack/ceilometer-0" Dec 13 03:30:47 crc kubenswrapper[5070]: I1213 03:30:47.584734 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/4dad639a-f1b7-4430-b213-78603c9d16aa-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"4dad639a-f1b7-4430-b213-78603c9d16aa\") " pod="openstack/ceilometer-0" Dec 13 03:30:47 crc kubenswrapper[5070]: I1213 03:30:47.584756 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4dad639a-f1b7-4430-b213-78603c9d16aa-config-data\") pod \"ceilometer-0\" (UID: \"4dad639a-f1b7-4430-b213-78603c9d16aa\") " pod="openstack/ceilometer-0" Dec 13 03:30:47 crc kubenswrapper[5070]: I1213 03:30:47.584803 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4dad639a-f1b7-4430-b213-78603c9d16aa-run-httpd\") pod \"ceilometer-0\" (UID: \"4dad639a-f1b7-4430-b213-78603c9d16aa\") " pod="openstack/ceilometer-0" Dec 13 03:30:47 crc kubenswrapper[5070]: I1213 03:30:47.584851 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4dad639a-f1b7-4430-b213-78603c9d16aa-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"4dad639a-f1b7-4430-b213-78603c9d16aa\") " pod="openstack/ceilometer-0" Dec 13 03:30:47 crc kubenswrapper[5070]: I1213 03:30:47.584880 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4dad639a-f1b7-4430-b213-78603c9d16aa-log-httpd\") pod \"ceilometer-0\" (UID: \"4dad639a-f1b7-4430-b213-78603c9d16aa\") " pod="openstack/ceilometer-0" Dec 13 03:30:47 crc kubenswrapper[5070]: I1213 
03:30:47.584911 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zghr9\" (UniqueName: \"kubernetes.io/projected/4dad639a-f1b7-4430-b213-78603c9d16aa-kube-api-access-zghr9\") pod \"ceilometer-0\" (UID: \"4dad639a-f1b7-4430-b213-78603c9d16aa\") " pod="openstack/ceilometer-0" Dec 13 03:30:47 crc kubenswrapper[5070]: I1213 03:30:47.586144 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4dad639a-f1b7-4430-b213-78603c9d16aa-run-httpd\") pod \"ceilometer-0\" (UID: \"4dad639a-f1b7-4430-b213-78603c9d16aa\") " pod="openstack/ceilometer-0" Dec 13 03:30:47 crc kubenswrapper[5070]: I1213 03:30:47.590851 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4dad639a-f1b7-4430-b213-78603c9d16aa-log-httpd\") pod \"ceilometer-0\" (UID: \"4dad639a-f1b7-4430-b213-78603c9d16aa\") " pod="openstack/ceilometer-0" Dec 13 03:30:47 crc kubenswrapper[5070]: I1213 03:30:47.591562 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4dad639a-f1b7-4430-b213-78603c9d16aa-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"4dad639a-f1b7-4430-b213-78603c9d16aa\") " pod="openstack/ceilometer-0" Dec 13 03:30:47 crc kubenswrapper[5070]: I1213 03:30:47.596054 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4dad639a-f1b7-4430-b213-78603c9d16aa-scripts\") pod \"ceilometer-0\" (UID: \"4dad639a-f1b7-4430-b213-78603c9d16aa\") " pod="openstack/ceilometer-0" Dec 13 03:30:47 crc kubenswrapper[5070]: I1213 03:30:47.596473 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4dad639a-f1b7-4430-b213-78603c9d16aa-config-data\") pod \"ceilometer-0\" (UID: \"4dad639a-f1b7-4430-b213-78603c9d16aa\") " pod="openstack/ceilometer-0" Dec 13 03:30:47 crc kubenswrapper[5070]: I1213 03:30:47.597733 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/4dad639a-f1b7-4430-b213-78603c9d16aa-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"4dad639a-f1b7-4430-b213-78603c9d16aa\") " pod="openstack/ceilometer-0" Dec 13 03:30:47 crc kubenswrapper[5070]: I1213 03:30:47.610819 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zghr9\" (UniqueName: \"kubernetes.io/projected/4dad639a-f1b7-4430-b213-78603c9d16aa-kube-api-access-zghr9\") pod \"ceilometer-0\" (UID: \"4dad639a-f1b7-4430-b213-78603c9d16aa\") " pod="openstack/ceilometer-0" Dec 13 03:30:47 crc kubenswrapper[5070]: I1213 03:30:47.659964 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 13 03:30:47 crc kubenswrapper[5070]: I1213 03:30:47.745630 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 13 03:30:47 crc kubenswrapper[5070]: I1213 03:30:47.927220 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7d678df9bd-j8bfh" event={"ID":"c23e095e-de7a-419f-b6d4-1ca536a3069b","Type":"ContainerStarted","Data":"6f0f766cce216fe43e9a781f8f0e546ca043cd65dfae0aa39c1cbff1ed38ce83"} Dec 13 03:30:47 crc kubenswrapper[5070]: I1213 03:30:47.928209 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-7d678df9bd-j8bfh" Dec 13 03:30:47 crc kubenswrapper[5070]: I1213 03:30:47.928278 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7d678df9bd-j8bfh" event={"ID":"c23e095e-de7a-419f-b6d4-1ca536a3069b","Type":"ContainerStarted","Data":"758889e4d479a113281e190a68c0f8a353381cc775df05c2387b78dc865b0279"} Dec 13 03:30:47 crc kubenswrapper[5070]: I1213 03:30:47.928294 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7d678df9bd-j8bfh" event={"ID":"c23e095e-de7a-419f-b6d4-1ca536a3069b","Type":"ContainerStarted","Data":"3fdf65fb24c81ced1fdb1a44e5b3f1de73f4bef8ece81ef16f826762a49c1244"} Dec 13 03:30:47 crc kubenswrapper[5070]: I1213 03:30:47.946734 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-7d678df9bd-j8bfh" podStartSLOduration=1.946713999 podStartE2EDuration="1.946713999s" podCreationTimestamp="2025-12-13 03:30:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 03:30:47.942160555 +0000 UTC m=+1140.178004101" watchObservedRunningTime="2025-12-13 03:30:47.946713999 +0000 UTC m=+1140.182557545" Dec 13 03:30:48 crc kubenswrapper[5070]: I1213 03:30:48.178343 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8009852b-9899-44f8-95ee-bf5b03dd8fc7" path="/var/lib/kubelet/pods/8009852b-9899-44f8-95ee-bf5b03dd8fc7/volumes" Dec 13 03:30:48 crc kubenswrapper[5070]: I1213 03:30:48.206064 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 13 03:30:48 crc kubenswrapper[5070]: I1213 03:30:48.940334 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4dad639a-f1b7-4430-b213-78603c9d16aa","Type":"ContainerStarted","Data":"03b341da32f7abd2a97154d1bc9fadc835fd6b616bcdf42d433c7d12fa11db29"} Dec 13 03:30:48 crc kubenswrapper[5070]: I1213 03:30:48.947330 5070 generic.go:334] "Generic (PLEG): container finished" podID="64c718c6-de73-4e08-9506-dce5dc9ebffd" containerID="7a29c9bb0683c47b91b2cb9bf3a68c1aa41a48307bb8349ec07d65d45a4594cb" exitCode=0 Dec 13 03:30:48 crc kubenswrapper[5070]: I1213 03:30:48.947404 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-n7ll5" event={"ID":"64c718c6-de73-4e08-9506-dce5dc9ebffd","Type":"ContainerDied","Data":"7a29c9bb0683c47b91b2cb9bf3a68c1aa41a48307bb8349ec07d65d45a4594cb"} Dec 13 03:30:48 crc kubenswrapper[5070]: I1213 03:30:48.947802 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-7d678df9bd-j8bfh" Dec 13 03:30:49 crc kubenswrapper[5070]: I1213 03:30:49.957566 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"4dad639a-f1b7-4430-b213-78603c9d16aa","Type":"ContainerStarted","Data":"c17487b04cbbf4a94d1ba0a99a49413d18e7190ff62ee7d32039b9c4e206bff5"} Dec 13 03:30:49 crc kubenswrapper[5070]: I1213 03:30:49.958162 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4dad639a-f1b7-4430-b213-78603c9d16aa","Type":"ContainerStarted","Data":"1e6772ff0a35c3ba899751d41299453744bc0fbd630e218fb1b17b3cd9281a0e"} Dec 13 03:30:50 crc kubenswrapper[5070]: I1213 03:30:50.322023 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-n7ll5" Dec 13 03:30:50 crc kubenswrapper[5070]: I1213 03:30:50.341801 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/64c718c6-de73-4e08-9506-dce5dc9ebffd-etc-machine-id\") pod \"64c718c6-de73-4e08-9506-dce5dc9ebffd\" (UID: \"64c718c6-de73-4e08-9506-dce5dc9ebffd\") " Dec 13 03:30:50 crc kubenswrapper[5070]: I1213 03:30:50.341847 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9r6jl\" (UniqueName: \"kubernetes.io/projected/64c718c6-de73-4e08-9506-dce5dc9ebffd-kube-api-access-9r6jl\") pod \"64c718c6-de73-4e08-9506-dce5dc9ebffd\" (UID: \"64c718c6-de73-4e08-9506-dce5dc9ebffd\") " Dec 13 03:30:50 crc kubenswrapper[5070]: I1213 03:30:50.341898 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/64c718c6-de73-4e08-9506-dce5dc9ebffd-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "64c718c6-de73-4e08-9506-dce5dc9ebffd" (UID: "64c718c6-de73-4e08-9506-dce5dc9ebffd"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 13 03:30:50 crc kubenswrapper[5070]: I1213 03:30:50.341943 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64c718c6-de73-4e08-9506-dce5dc9ebffd-combined-ca-bundle\") pod \"64c718c6-de73-4e08-9506-dce5dc9ebffd\" (UID: \"64c718c6-de73-4e08-9506-dce5dc9ebffd\") " Dec 13 03:30:50 crc kubenswrapper[5070]: I1213 03:30:50.341988 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/64c718c6-de73-4e08-9506-dce5dc9ebffd-db-sync-config-data\") pod \"64c718c6-de73-4e08-9506-dce5dc9ebffd\" (UID: \"64c718c6-de73-4e08-9506-dce5dc9ebffd\") " Dec 13 03:30:50 crc kubenswrapper[5070]: I1213 03:30:50.342542 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/64c718c6-de73-4e08-9506-dce5dc9ebffd-scripts\") pod \"64c718c6-de73-4e08-9506-dce5dc9ebffd\" (UID: \"64c718c6-de73-4e08-9506-dce5dc9ebffd\") " Dec 13 03:30:50 crc kubenswrapper[5070]: I1213 03:30:50.342967 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/64c718c6-de73-4e08-9506-dce5dc9ebffd-config-data\") pod \"64c718c6-de73-4e08-9506-dce5dc9ebffd\" (UID: \"64c718c6-de73-4e08-9506-dce5dc9ebffd\") " Dec 13 03:30:50 crc kubenswrapper[5070]: I1213 03:30:50.343483 5070 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/64c718c6-de73-4e08-9506-dce5dc9ebffd-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 13 03:30:50 crc kubenswrapper[5070]: I1213 03:30:50.348398 5070 
operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/64c718c6-de73-4e08-9506-dce5dc9ebffd-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "64c718c6-de73-4e08-9506-dce5dc9ebffd" (UID: "64c718c6-de73-4e08-9506-dce5dc9ebffd"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:30:50 crc kubenswrapper[5070]: I1213 03:30:50.348778 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/64c718c6-de73-4e08-9506-dce5dc9ebffd-scripts" (OuterVolumeSpecName: "scripts") pod "64c718c6-de73-4e08-9506-dce5dc9ebffd" (UID: "64c718c6-de73-4e08-9506-dce5dc9ebffd"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:30:50 crc kubenswrapper[5070]: I1213 03:30:50.352568 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/64c718c6-de73-4e08-9506-dce5dc9ebffd-kube-api-access-9r6jl" (OuterVolumeSpecName: "kube-api-access-9r6jl") pod "64c718c6-de73-4e08-9506-dce5dc9ebffd" (UID: "64c718c6-de73-4e08-9506-dce5dc9ebffd"). InnerVolumeSpecName "kube-api-access-9r6jl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:30:50 crc kubenswrapper[5070]: I1213 03:30:50.373705 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/64c718c6-de73-4e08-9506-dce5dc9ebffd-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "64c718c6-de73-4e08-9506-dce5dc9ebffd" (UID: "64c718c6-de73-4e08-9506-dce5dc9ebffd"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:30:50 crc kubenswrapper[5070]: I1213 03:30:50.398772 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/64c718c6-de73-4e08-9506-dce5dc9ebffd-config-data" (OuterVolumeSpecName: "config-data") pod "64c718c6-de73-4e08-9506-dce5dc9ebffd" (UID: "64c718c6-de73-4e08-9506-dce5dc9ebffd"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:30:50 crc kubenswrapper[5070]: I1213 03:30:50.445487 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9r6jl\" (UniqueName: \"kubernetes.io/projected/64c718c6-de73-4e08-9506-dce5dc9ebffd-kube-api-access-9r6jl\") on node \"crc\" DevicePath \"\"" Dec 13 03:30:50 crc kubenswrapper[5070]: I1213 03:30:50.445530 5070 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64c718c6-de73-4e08-9506-dce5dc9ebffd-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 03:30:50 crc kubenswrapper[5070]: I1213 03:30:50.445544 5070 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/64c718c6-de73-4e08-9506-dce5dc9ebffd-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Dec 13 03:30:50 crc kubenswrapper[5070]: I1213 03:30:50.445556 5070 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/64c718c6-de73-4e08-9506-dce5dc9ebffd-scripts\") on node \"crc\" DevicePath \"\"" Dec 13 03:30:50 crc kubenswrapper[5070]: I1213 03:30:50.445569 5070 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/64c718c6-de73-4e08-9506-dce5dc9ebffd-config-data\") on node \"crc\" DevicePath \"\"" Dec 13 03:30:50 crc kubenswrapper[5070]: I1213 03:30:50.967274 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-n7ll5" event={"ID":"64c718c6-de73-4e08-9506-dce5dc9ebffd","Type":"ContainerDied","Data":"b986ef3e2e6151ac907c9532f9ac0bfb11c120c8b28571b6a226d508c9764a08"} Dec 13 03:30:50 crc kubenswrapper[5070]: I1213 03:30:50.967608 5070 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b986ef3e2e6151ac907c9532f9ac0bfb11c120c8b28571b6a226d508c9764a08" Dec 13 03:30:50 crc kubenswrapper[5070]: I1213 03:30:50.967314 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-n7ll5" Dec 13 03:30:50 crc kubenswrapper[5070]: I1213 03:30:50.974115 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4dad639a-f1b7-4430-b213-78603c9d16aa","Type":"ContainerStarted","Data":"d429d87d122c54a47a3ea93e683f2170f0014e80d514b4ecae48bc796f40f541"} Dec 13 03:30:51 crc kubenswrapper[5070]: I1213 03:30:51.270746 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Dec 13 03:30:51 crc kubenswrapper[5070]: E1213 03:30:51.271253 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64c718c6-de73-4e08-9506-dce5dc9ebffd" containerName="cinder-db-sync" Dec 13 03:30:51 crc kubenswrapper[5070]: I1213 03:30:51.271325 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="64c718c6-de73-4e08-9506-dce5dc9ebffd" containerName="cinder-db-sync" Dec 13 03:30:51 crc kubenswrapper[5070]: I1213 03:30:51.271595 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="64c718c6-de73-4e08-9506-dce5dc9ebffd" containerName="cinder-db-sync" Dec 13 03:30:51 crc kubenswrapper[5070]: I1213 03:30:51.272486 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 13 03:30:51 crc kubenswrapper[5070]: I1213 03:30:51.275548 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Dec 13 03:30:51 crc kubenswrapper[5070]: I1213 03:30:51.275782 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Dec 13 03:30:51 crc kubenswrapper[5070]: I1213 03:30:51.276027 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-2xw4r" Dec 13 03:30:51 crc kubenswrapper[5070]: I1213 03:30:51.278425 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Dec 13 03:30:51 crc kubenswrapper[5070]: I1213 03:30:51.306126 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 13 03:30:51 crc kubenswrapper[5070]: I1213 03:30:51.361490 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/4b4c78b2-a38d-476f-b88a-089e89d92fef-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"4b4c78b2-a38d-476f-b88a-089e89d92fef\") " pod="openstack/cinder-scheduler-0" Dec 13 03:30:51 crc kubenswrapper[5070]: I1213 03:30:51.361545 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4b4c78b2-a38d-476f-b88a-089e89d92fef-scripts\") pod \"cinder-scheduler-0\" (UID: \"4b4c78b2-a38d-476f-b88a-089e89d92fef\") " pod="openstack/cinder-scheduler-0" Dec 13 03:30:51 crc kubenswrapper[5070]: I1213 03:30:51.361567 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4b4c78b2-a38d-476f-b88a-089e89d92fef-config-data\") pod \"cinder-scheduler-0\" (UID: \"4b4c78b2-a38d-476f-b88a-089e89d92fef\") " pod="openstack/cinder-scheduler-0" Dec 13 03:30:51 crc kubenswrapper[5070]: I1213 03:30:51.361609 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4b4c78b2-a38d-476f-b88a-089e89d92fef-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"4b4c78b2-a38d-476f-b88a-089e89d92fef\") " pod="openstack/cinder-scheduler-0" Dec 13 03:30:51 crc kubenswrapper[5070]: I1213 03:30:51.361642 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4b4c78b2-a38d-476f-b88a-089e89d92fef-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"4b4c78b2-a38d-476f-b88a-089e89d92fef\") " pod="openstack/cinder-scheduler-0" Dec 13 03:30:51 crc kubenswrapper[5070]: I1213 03:30:51.361673 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k7sp8\" (UniqueName: \"kubernetes.io/projected/4b4c78b2-a38d-476f-b88a-089e89d92fef-kube-api-access-k7sp8\") pod \"cinder-scheduler-0\" (UID: \"4b4c78b2-a38d-476f-b88a-089e89d92fef\") " pod="openstack/cinder-scheduler-0" Dec 13 03:30:51 crc kubenswrapper[5070]: I1213 03:30:51.380751 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-869f779d85-6nfwl"] Dec 13 03:30:51 crc kubenswrapper[5070]: I1213 03:30:51.381015 5070 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openstack/dnsmasq-dns-869f779d85-6nfwl" podUID="36d51933-fda4-48e7-a109-f77461bd01b6" containerName="dnsmasq-dns" containerID="cri-o://06e964c99b3fd821858762a7911e280833d507f457e211d28901d8d711e81cd8" gracePeriod=10 Dec 13 03:30:51 crc kubenswrapper[5070]: I1213 03:30:51.382620 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-869f779d85-6nfwl" Dec 13 03:30:51 crc kubenswrapper[5070]: I1213 03:30:51.420825 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-58db5546cc-vk92n"] Dec 13 03:30:51 crc kubenswrapper[5070]: I1213 03:30:51.436235 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-58db5546cc-vk92n" Dec 13 03:30:51 crc kubenswrapper[5070]: I1213 03:30:51.493611 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/91cb6c81-3062-4ff8-a34e-50a21bc217c0-dns-svc\") pod \"dnsmasq-dns-58db5546cc-vk92n\" (UID: \"91cb6c81-3062-4ff8-a34e-50a21bc217c0\") " pod="openstack/dnsmasq-dns-58db5546cc-vk92n" Dec 13 03:30:51 crc kubenswrapper[5070]: I1213 03:30:51.493679 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/91cb6c81-3062-4ff8-a34e-50a21bc217c0-ovsdbserver-sb\") pod \"dnsmasq-dns-58db5546cc-vk92n\" (UID: \"91cb6c81-3062-4ff8-a34e-50a21bc217c0\") " pod="openstack/dnsmasq-dns-58db5546cc-vk92n" Dec 13 03:30:51 crc kubenswrapper[5070]: I1213 03:30:51.493718 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4b4c78b2-a38d-476f-b88a-089e89d92fef-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"4b4c78b2-a38d-476f-b88a-089e89d92fef\") " pod="openstack/cinder-scheduler-0" Dec 13 03:30:51 crc kubenswrapper[5070]: I1213 03:30:51.493779 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/91cb6c81-3062-4ff8-a34e-50a21bc217c0-ovsdbserver-nb\") pod \"dnsmasq-dns-58db5546cc-vk92n\" (UID: \"91cb6c81-3062-4ff8-a34e-50a21bc217c0\") " pod="openstack/dnsmasq-dns-58db5546cc-vk92n" Dec 13 03:30:51 crc kubenswrapper[5070]: I1213 03:30:51.493831 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4b4c78b2-a38d-476f-b88a-089e89d92fef-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"4b4c78b2-a38d-476f-b88a-089e89d92fef\") " pod="openstack/cinder-scheduler-0" Dec 13 03:30:51 crc kubenswrapper[5070]: I1213 03:30:51.493916 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k7sp8\" (UniqueName: \"kubernetes.io/projected/4b4c78b2-a38d-476f-b88a-089e89d92fef-kube-api-access-k7sp8\") pod \"cinder-scheduler-0\" (UID: \"4b4c78b2-a38d-476f-b88a-089e89d92fef\") " pod="openstack/cinder-scheduler-0" Dec 13 03:30:51 crc kubenswrapper[5070]: I1213 03:30:51.494122 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/91cb6c81-3062-4ff8-a34e-50a21bc217c0-config\") pod \"dnsmasq-dns-58db5546cc-vk92n\" (UID: \"91cb6c81-3062-4ff8-a34e-50a21bc217c0\") " pod="openstack/dnsmasq-dns-58db5546cc-vk92n" Dec 13 03:30:51 crc kubenswrapper[5070]: I1213 03:30:51.494146 5070 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/4b4c78b2-a38d-476f-b88a-089e89d92fef-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"4b4c78b2-a38d-476f-b88a-089e89d92fef\") " pod="openstack/cinder-scheduler-0" Dec 13 03:30:51 crc kubenswrapper[5070]: I1213 03:30:51.494199 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4b4c78b2-a38d-476f-b88a-089e89d92fef-scripts\") pod \"cinder-scheduler-0\" (UID: \"4b4c78b2-a38d-476f-b88a-089e89d92fef\") " pod="openstack/cinder-scheduler-0" Dec 13 03:30:51 crc kubenswrapper[5070]: I1213 03:30:51.494221 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b82vq\" (UniqueName: \"kubernetes.io/projected/91cb6c81-3062-4ff8-a34e-50a21bc217c0-kube-api-access-b82vq\") pod \"dnsmasq-dns-58db5546cc-vk92n\" (UID: \"91cb6c81-3062-4ff8-a34e-50a21bc217c0\") " pod="openstack/dnsmasq-dns-58db5546cc-vk92n" Dec 13 03:30:51 crc kubenswrapper[5070]: I1213 03:30:51.494253 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4b4c78b2-a38d-476f-b88a-089e89d92fef-config-data\") pod \"cinder-scheduler-0\" (UID: \"4b4c78b2-a38d-476f-b88a-089e89d92fef\") " pod="openstack/cinder-scheduler-0" Dec 13 03:30:51 crc kubenswrapper[5070]: I1213 03:30:51.499799 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/4b4c78b2-a38d-476f-b88a-089e89d92fef-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"4b4c78b2-a38d-476f-b88a-089e89d92fef\") " pod="openstack/cinder-scheduler-0" Dec 13 03:30:51 crc kubenswrapper[5070]: I1213 03:30:51.505391 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4b4c78b2-a38d-476f-b88a-089e89d92fef-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"4b4c78b2-a38d-476f-b88a-089e89d92fef\") " pod="openstack/cinder-scheduler-0" Dec 13 03:30:51 crc kubenswrapper[5070]: I1213 03:30:51.507257 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4b4c78b2-a38d-476f-b88a-089e89d92fef-scripts\") pod \"cinder-scheduler-0\" (UID: \"4b4c78b2-a38d-476f-b88a-089e89d92fef\") " pod="openstack/cinder-scheduler-0" Dec 13 03:30:51 crc kubenswrapper[5070]: I1213 03:30:51.516377 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4b4c78b2-a38d-476f-b88a-089e89d92fef-config-data\") pod \"cinder-scheduler-0\" (UID: \"4b4c78b2-a38d-476f-b88a-089e89d92fef\") " pod="openstack/cinder-scheduler-0" Dec 13 03:30:51 crc kubenswrapper[5070]: I1213 03:30:51.519132 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4b4c78b2-a38d-476f-b88a-089e89d92fef-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"4b4c78b2-a38d-476f-b88a-089e89d92fef\") " pod="openstack/cinder-scheduler-0" Dec 13 03:30:51 crc kubenswrapper[5070]: I1213 03:30:51.520048 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k7sp8\" (UniqueName: \"kubernetes.io/projected/4b4c78b2-a38d-476f-b88a-089e89d92fef-kube-api-access-k7sp8\") pod \"cinder-scheduler-0\" (UID: 
\"4b4c78b2-a38d-476f-b88a-089e89d92fef\") " pod="openstack/cinder-scheduler-0" Dec 13 03:30:51 crc kubenswrapper[5070]: I1213 03:30:51.554043 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-58db5546cc-vk92n"] Dec 13 03:30:51 crc kubenswrapper[5070]: I1213 03:30:51.591319 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Dec 13 03:30:51 crc kubenswrapper[5070]: I1213 03:30:51.593203 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 13 03:30:51 crc kubenswrapper[5070]: I1213 03:30:51.593377 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 13 03:30:51 crc kubenswrapper[5070]: I1213 03:30:51.595381 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Dec 13 03:30:51 crc kubenswrapper[5070]: I1213 03:30:51.596317 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/91cb6c81-3062-4ff8-a34e-50a21bc217c0-config\") pod \"dnsmasq-dns-58db5546cc-vk92n\" (UID: \"91cb6c81-3062-4ff8-a34e-50a21bc217c0\") " pod="openstack/dnsmasq-dns-58db5546cc-vk92n" Dec 13 03:30:51 crc kubenswrapper[5070]: I1213 03:30:51.596373 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b82vq\" (UniqueName: \"kubernetes.io/projected/91cb6c81-3062-4ff8-a34e-50a21bc217c0-kube-api-access-b82vq\") pod \"dnsmasq-dns-58db5546cc-vk92n\" (UID: \"91cb6c81-3062-4ff8-a34e-50a21bc217c0\") " pod="openstack/dnsmasq-dns-58db5546cc-vk92n" Dec 13 03:30:51 crc kubenswrapper[5070]: I1213 03:30:51.596423 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/91cb6c81-3062-4ff8-a34e-50a21bc217c0-dns-svc\") pod \"dnsmasq-dns-58db5546cc-vk92n\" (UID: \"91cb6c81-3062-4ff8-a34e-50a21bc217c0\") " pod="openstack/dnsmasq-dns-58db5546cc-vk92n" Dec 13 03:30:51 crc kubenswrapper[5070]: I1213 03:30:51.596465 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/91cb6c81-3062-4ff8-a34e-50a21bc217c0-ovsdbserver-sb\") pod \"dnsmasq-dns-58db5546cc-vk92n\" (UID: \"91cb6c81-3062-4ff8-a34e-50a21bc217c0\") " pod="openstack/dnsmasq-dns-58db5546cc-vk92n" Dec 13 03:30:51 crc kubenswrapper[5070]: I1213 03:30:51.596497 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/91cb6c81-3062-4ff8-a34e-50a21bc217c0-ovsdbserver-nb\") pod \"dnsmasq-dns-58db5546cc-vk92n\" (UID: \"91cb6c81-3062-4ff8-a34e-50a21bc217c0\") " pod="openstack/dnsmasq-dns-58db5546cc-vk92n" Dec 13 03:30:51 crc kubenswrapper[5070]: I1213 03:30:51.598084 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/91cb6c81-3062-4ff8-a34e-50a21bc217c0-ovsdbserver-nb\") pod \"dnsmasq-dns-58db5546cc-vk92n\" (UID: \"91cb6c81-3062-4ff8-a34e-50a21bc217c0\") " pod="openstack/dnsmasq-dns-58db5546cc-vk92n" Dec 13 03:30:51 crc kubenswrapper[5070]: I1213 03:30:51.598106 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/91cb6c81-3062-4ff8-a34e-50a21bc217c0-config\") pod \"dnsmasq-dns-58db5546cc-vk92n\" (UID: \"91cb6c81-3062-4ff8-a34e-50a21bc217c0\") " 
pod="openstack/dnsmasq-dns-58db5546cc-vk92n" Dec 13 03:30:51 crc kubenswrapper[5070]: I1213 03:30:51.598704 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/91cb6c81-3062-4ff8-a34e-50a21bc217c0-ovsdbserver-sb\") pod \"dnsmasq-dns-58db5546cc-vk92n\" (UID: \"91cb6c81-3062-4ff8-a34e-50a21bc217c0\") " pod="openstack/dnsmasq-dns-58db5546cc-vk92n" Dec 13 03:30:51 crc kubenswrapper[5070]: I1213 03:30:51.600434 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/91cb6c81-3062-4ff8-a34e-50a21bc217c0-dns-svc\") pod \"dnsmasq-dns-58db5546cc-vk92n\" (UID: \"91cb6c81-3062-4ff8-a34e-50a21bc217c0\") " pod="openstack/dnsmasq-dns-58db5546cc-vk92n" Dec 13 03:30:51 crc kubenswrapper[5070]: I1213 03:30:51.603806 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 13 03:30:51 crc kubenswrapper[5070]: I1213 03:30:51.632325 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b82vq\" (UniqueName: \"kubernetes.io/projected/91cb6c81-3062-4ff8-a34e-50a21bc217c0-kube-api-access-b82vq\") pod \"dnsmasq-dns-58db5546cc-vk92n\" (UID: \"91cb6c81-3062-4ff8-a34e-50a21bc217c0\") " pod="openstack/dnsmasq-dns-58db5546cc-vk92n" Dec 13 03:30:51 crc kubenswrapper[5070]: I1213 03:30:51.694795 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-aa68-account-create-77x5t"] Dec 13 03:30:51 crc kubenswrapper[5070]: I1213 03:30:51.697912 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-aa68-account-create-77x5t" Dec 13 03:30:51 crc kubenswrapper[5070]: I1213 03:30:51.701123 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/75d3764c-455c-4b0d-98cd-ceaaee77b19e-logs\") pod \"cinder-api-0\" (UID: \"75d3764c-455c-4b0d-98cd-ceaaee77b19e\") " pod="openstack/cinder-api-0" Dec 13 03:30:51 crc kubenswrapper[5070]: I1213 03:30:51.701244 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z7sfb\" (UniqueName: \"kubernetes.io/projected/75d3764c-455c-4b0d-98cd-ceaaee77b19e-kube-api-access-z7sfb\") pod \"cinder-api-0\" (UID: \"75d3764c-455c-4b0d-98cd-ceaaee77b19e\") " pod="openstack/cinder-api-0" Dec 13 03:30:51 crc kubenswrapper[5070]: I1213 03:30:51.701484 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/75d3764c-455c-4b0d-98cd-ceaaee77b19e-etc-machine-id\") pod \"cinder-api-0\" (UID: \"75d3764c-455c-4b0d-98cd-ceaaee77b19e\") " pod="openstack/cinder-api-0" Dec 13 03:30:51 crc kubenswrapper[5070]: I1213 03:30:51.701587 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/75d3764c-455c-4b0d-98cd-ceaaee77b19e-config-data-custom\") pod \"cinder-api-0\" (UID: \"75d3764c-455c-4b0d-98cd-ceaaee77b19e\") " pod="openstack/cinder-api-0" Dec 13 03:30:51 crc kubenswrapper[5070]: I1213 03:30:51.701637 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/75d3764c-455c-4b0d-98cd-ceaaee77b19e-scripts\") pod \"cinder-api-0\" (UID: \"75d3764c-455c-4b0d-98cd-ceaaee77b19e\") " 
pod="openstack/cinder-api-0" Dec 13 03:30:51 crc kubenswrapper[5070]: I1213 03:30:51.701740 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/75d3764c-455c-4b0d-98cd-ceaaee77b19e-config-data\") pod \"cinder-api-0\" (UID: \"75d3764c-455c-4b0d-98cd-ceaaee77b19e\") " pod="openstack/cinder-api-0" Dec 13 03:30:51 crc kubenswrapper[5070]: I1213 03:30:51.701776 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/75d3764c-455c-4b0d-98cd-ceaaee77b19e-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"75d3764c-455c-4b0d-98cd-ceaaee77b19e\") " pod="openstack/cinder-api-0" Dec 13 03:30:51 crc kubenswrapper[5070]: I1213 03:30:51.702479 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret" Dec 13 03:30:51 crc kubenswrapper[5070]: I1213 03:30:51.704886 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-aa68-account-create-77x5t"] Dec 13 03:30:51 crc kubenswrapper[5070]: I1213 03:30:51.804168 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/75d3764c-455c-4b0d-98cd-ceaaee77b19e-scripts\") pod \"cinder-api-0\" (UID: \"75d3764c-455c-4b0d-98cd-ceaaee77b19e\") " pod="openstack/cinder-api-0" Dec 13 03:30:51 crc kubenswrapper[5070]: I1213 03:30:51.804211 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/75d3764c-455c-4b0d-98cd-ceaaee77b19e-config-data\") pod \"cinder-api-0\" (UID: \"75d3764c-455c-4b0d-98cd-ceaaee77b19e\") " pod="openstack/cinder-api-0" Dec 13 03:30:51 crc kubenswrapper[5070]: I1213 03:30:51.804244 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/75d3764c-455c-4b0d-98cd-ceaaee77b19e-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"75d3764c-455c-4b0d-98cd-ceaaee77b19e\") " pod="openstack/cinder-api-0" Dec 13 03:30:51 crc kubenswrapper[5070]: I1213 03:30:51.804276 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/75d3764c-455c-4b0d-98cd-ceaaee77b19e-logs\") pod \"cinder-api-0\" (UID: \"75d3764c-455c-4b0d-98cd-ceaaee77b19e\") " pod="openstack/cinder-api-0" Dec 13 03:30:51 crc kubenswrapper[5070]: I1213 03:30:51.804331 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z7sfb\" (UniqueName: \"kubernetes.io/projected/75d3764c-455c-4b0d-98cd-ceaaee77b19e-kube-api-access-z7sfb\") pod \"cinder-api-0\" (UID: \"75d3764c-455c-4b0d-98cd-ceaaee77b19e\") " pod="openstack/cinder-api-0" Dec 13 03:30:51 crc kubenswrapper[5070]: I1213 03:30:51.804411 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/75d3764c-455c-4b0d-98cd-ceaaee77b19e-etc-machine-id\") pod \"cinder-api-0\" (UID: \"75d3764c-455c-4b0d-98cd-ceaaee77b19e\") " pod="openstack/cinder-api-0" Dec 13 03:30:51 crc kubenswrapper[5070]: I1213 03:30:51.804459 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/75d3764c-455c-4b0d-98cd-ceaaee77b19e-config-data-custom\") pod \"cinder-api-0\" (UID: 
\"75d3764c-455c-4b0d-98cd-ceaaee77b19e\") " pod="openstack/cinder-api-0" Dec 13 03:30:51 crc kubenswrapper[5070]: I1213 03:30:51.804482 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bf227\" (UniqueName: \"kubernetes.io/projected/deabd048-0a16-41dc-870a-bd5c5594b612-kube-api-access-bf227\") pod \"nova-cell1-aa68-account-create-77x5t\" (UID: \"deabd048-0a16-41dc-870a-bd5c5594b612\") " pod="openstack/nova-cell1-aa68-account-create-77x5t" Dec 13 03:30:51 crc kubenswrapper[5070]: I1213 03:30:51.805485 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/75d3764c-455c-4b0d-98cd-ceaaee77b19e-etc-machine-id\") pod \"cinder-api-0\" (UID: \"75d3764c-455c-4b0d-98cd-ceaaee77b19e\") " pod="openstack/cinder-api-0" Dec 13 03:30:51 crc kubenswrapper[5070]: I1213 03:30:51.805860 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/75d3764c-455c-4b0d-98cd-ceaaee77b19e-logs\") pod \"cinder-api-0\" (UID: \"75d3764c-455c-4b0d-98cd-ceaaee77b19e\") " pod="openstack/cinder-api-0" Dec 13 03:30:51 crc kubenswrapper[5070]: I1213 03:30:51.811210 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/75d3764c-455c-4b0d-98cd-ceaaee77b19e-scripts\") pod \"cinder-api-0\" (UID: \"75d3764c-455c-4b0d-98cd-ceaaee77b19e\") " pod="openstack/cinder-api-0" Dec 13 03:30:51 crc kubenswrapper[5070]: I1213 03:30:51.813568 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/75d3764c-455c-4b0d-98cd-ceaaee77b19e-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"75d3764c-455c-4b0d-98cd-ceaaee77b19e\") " pod="openstack/cinder-api-0" Dec 13 03:30:51 crc kubenswrapper[5070]: I1213 03:30:51.825613 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/75d3764c-455c-4b0d-98cd-ceaaee77b19e-config-data\") pod \"cinder-api-0\" (UID: \"75d3764c-455c-4b0d-98cd-ceaaee77b19e\") " pod="openstack/cinder-api-0" Dec 13 03:30:51 crc kubenswrapper[5070]: I1213 03:30:51.826151 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/75d3764c-455c-4b0d-98cd-ceaaee77b19e-config-data-custom\") pod \"cinder-api-0\" (UID: \"75d3764c-455c-4b0d-98cd-ceaaee77b19e\") " pod="openstack/cinder-api-0" Dec 13 03:30:51 crc kubenswrapper[5070]: I1213 03:30:51.828080 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z7sfb\" (UniqueName: \"kubernetes.io/projected/75d3764c-455c-4b0d-98cd-ceaaee77b19e-kube-api-access-z7sfb\") pod \"cinder-api-0\" (UID: \"75d3764c-455c-4b0d-98cd-ceaaee77b19e\") " pod="openstack/cinder-api-0" Dec 13 03:30:51 crc kubenswrapper[5070]: I1213 03:30:51.894537 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-58db5546cc-vk92n" Dec 13 03:30:51 crc kubenswrapper[5070]: I1213 03:30:51.907225 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bf227\" (UniqueName: \"kubernetes.io/projected/deabd048-0a16-41dc-870a-bd5c5594b612-kube-api-access-bf227\") pod \"nova-cell1-aa68-account-create-77x5t\" (UID: \"deabd048-0a16-41dc-870a-bd5c5594b612\") " pod="openstack/nova-cell1-aa68-account-create-77x5t" Dec 13 03:30:51 crc kubenswrapper[5070]: I1213 03:30:51.939046 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bf227\" (UniqueName: \"kubernetes.io/projected/deabd048-0a16-41dc-870a-bd5c5594b612-kube-api-access-bf227\") pod \"nova-cell1-aa68-account-create-77x5t\" (UID: \"deabd048-0a16-41dc-870a-bd5c5594b612\") " pod="openstack/nova-cell1-aa68-account-create-77x5t" Dec 13 03:30:51 crc kubenswrapper[5070]: I1213 03:30:51.945918 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 13 03:30:52 crc kubenswrapper[5070]: I1213 03:30:52.019695 5070 generic.go:334] "Generic (PLEG): container finished" podID="36d51933-fda4-48e7-a109-f77461bd01b6" containerID="06e964c99b3fd821858762a7911e280833d507f457e211d28901d8d711e81cd8" exitCode=0 Dec 13 03:30:52 crc kubenswrapper[5070]: I1213 03:30:52.019750 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-869f779d85-6nfwl" event={"ID":"36d51933-fda4-48e7-a109-f77461bd01b6","Type":"ContainerDied","Data":"06e964c99b3fd821858762a7911e280833d507f457e211d28901d8d711e81cd8"} Dec 13 03:30:52 crc kubenswrapper[5070]: I1213 03:30:52.089946 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-aa68-account-create-77x5t" Dec 13 03:30:52 crc kubenswrapper[5070]: I1213 03:30:52.106825 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-869f779d85-6nfwl" Dec 13 03:30:52 crc kubenswrapper[5070]: I1213 03:30:52.222502 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/36d51933-fda4-48e7-a109-f77461bd01b6-config\") pod \"36d51933-fda4-48e7-a109-f77461bd01b6\" (UID: \"36d51933-fda4-48e7-a109-f77461bd01b6\") " Dec 13 03:30:52 crc kubenswrapper[5070]: I1213 03:30:52.222823 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/36d51933-fda4-48e7-a109-f77461bd01b6-ovsdbserver-sb\") pod \"36d51933-fda4-48e7-a109-f77461bd01b6\" (UID: \"36d51933-fda4-48e7-a109-f77461bd01b6\") " Dec 13 03:30:52 crc kubenswrapper[5070]: I1213 03:30:52.222854 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/36d51933-fda4-48e7-a109-f77461bd01b6-ovsdbserver-nb\") pod \"36d51933-fda4-48e7-a109-f77461bd01b6\" (UID: \"36d51933-fda4-48e7-a109-f77461bd01b6\") " Dec 13 03:30:52 crc kubenswrapper[5070]: I1213 03:30:52.222897 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/36d51933-fda4-48e7-a109-f77461bd01b6-dns-svc\") pod \"36d51933-fda4-48e7-a109-f77461bd01b6\" (UID: \"36d51933-fda4-48e7-a109-f77461bd01b6\") " Dec 13 03:30:52 crc kubenswrapper[5070]: I1213 03:30:52.222916 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fjwr5\" (UniqueName: \"kubernetes.io/projected/36d51933-fda4-48e7-a109-f77461bd01b6-kube-api-access-fjwr5\") pod \"36d51933-fda4-48e7-a109-f77461bd01b6\" (UID: \"36d51933-fda4-48e7-a109-f77461bd01b6\") " Dec 13 03:30:52 crc kubenswrapper[5070]: I1213 03:30:52.271818 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/36d51933-fda4-48e7-a109-f77461bd01b6-kube-api-access-fjwr5" (OuterVolumeSpecName: "kube-api-access-fjwr5") pod "36d51933-fda4-48e7-a109-f77461bd01b6" (UID: "36d51933-fda4-48e7-a109-f77461bd01b6"). InnerVolumeSpecName "kube-api-access-fjwr5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:30:52 crc kubenswrapper[5070]: I1213 03:30:52.312213 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/36d51933-fda4-48e7-a109-f77461bd01b6-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "36d51933-fda4-48e7-a109-f77461bd01b6" (UID: "36d51933-fda4-48e7-a109-f77461bd01b6"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:30:52 crc kubenswrapper[5070]: I1213 03:30:52.312572 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/36d51933-fda4-48e7-a109-f77461bd01b6-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "36d51933-fda4-48e7-a109-f77461bd01b6" (UID: "36d51933-fda4-48e7-a109-f77461bd01b6"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:30:52 crc kubenswrapper[5070]: I1213 03:30:52.317364 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/36d51933-fda4-48e7-a109-f77461bd01b6-config" (OuterVolumeSpecName: "config") pod "36d51933-fda4-48e7-a109-f77461bd01b6" (UID: "36d51933-fda4-48e7-a109-f77461bd01b6"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:30:52 crc kubenswrapper[5070]: I1213 03:30:52.332543 5070 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/36d51933-fda4-48e7-a109-f77461bd01b6-config\") on node \"crc\" DevicePath \"\"" Dec 13 03:30:52 crc kubenswrapper[5070]: I1213 03:30:52.332691 5070 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/36d51933-fda4-48e7-a109-f77461bd01b6-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 13 03:30:52 crc kubenswrapper[5070]: I1213 03:30:52.332754 5070 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/36d51933-fda4-48e7-a109-f77461bd01b6-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 13 03:30:52 crc kubenswrapper[5070]: I1213 03:30:52.332812 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fjwr5\" (UniqueName: \"kubernetes.io/projected/36d51933-fda4-48e7-a109-f77461bd01b6-kube-api-access-fjwr5\") on node \"crc\" DevicePath \"\"" Dec 13 03:30:52 crc kubenswrapper[5070]: I1213 03:30:52.344412 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/36d51933-fda4-48e7-a109-f77461bd01b6-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "36d51933-fda4-48e7-a109-f77461bd01b6" (UID: "36d51933-fda4-48e7-a109-f77461bd01b6"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:30:52 crc kubenswrapper[5070]: I1213 03:30:52.361096 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 13 03:30:52 crc kubenswrapper[5070]: I1213 03:30:52.434639 5070 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/36d51933-fda4-48e7-a109-f77461bd01b6-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 13 03:30:52 crc kubenswrapper[5070]: I1213 03:30:52.780370 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-58db5546cc-vk92n"] Dec 13 03:30:52 crc kubenswrapper[5070]: I1213 03:30:52.898647 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 13 03:30:52 crc kubenswrapper[5070]: I1213 03:30:52.936082 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-aa68-account-create-77x5t"] Dec 13 03:30:53 crc kubenswrapper[5070]: I1213 03:30:53.096768 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4dad639a-f1b7-4430-b213-78603c9d16aa","Type":"ContainerStarted","Data":"8200cea10084f353b27094e266302eecdc4a81b4435011eb8bb9150a4c2c9e7e"} Dec 13 03:30:53 crc kubenswrapper[5070]: I1213 03:30:53.096967 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="4dad639a-f1b7-4430-b213-78603c9d16aa" containerName="ceilometer-central-agent" containerID="cri-o://1e6772ff0a35c3ba899751d41299453744bc0fbd630e218fb1b17b3cd9281a0e" gracePeriod=30 Dec 13 03:30:53 crc kubenswrapper[5070]: I1213 03:30:53.097665 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 13 03:30:53 crc kubenswrapper[5070]: I1213 03:30:53.097730 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="4dad639a-f1b7-4430-b213-78603c9d16aa" containerName="sg-core" 
containerID="cri-o://d429d87d122c54a47a3ea93e683f2170f0014e80d514b4ecae48bc796f40f541" gracePeriod=30 Dec 13 03:30:53 crc kubenswrapper[5070]: I1213 03:30:53.097837 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="4dad639a-f1b7-4430-b213-78603c9d16aa" containerName="proxy-httpd" containerID="cri-o://8200cea10084f353b27094e266302eecdc4a81b4435011eb8bb9150a4c2c9e7e" gracePeriod=30 Dec 13 03:30:53 crc kubenswrapper[5070]: I1213 03:30:53.097838 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="4dad639a-f1b7-4430-b213-78603c9d16aa" containerName="ceilometer-notification-agent" containerID="cri-o://c17487b04cbbf4a94d1ba0a99a49413d18e7190ff62ee7d32039b9c4e206bff5" gracePeriod=30 Dec 13 03:30:53 crc kubenswrapper[5070]: I1213 03:30:53.116687 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"4b4c78b2-a38d-476f-b88a-089e89d92fef","Type":"ContainerStarted","Data":"9ae36b0518206e635e8f6bf728e691724e7c99a9567f39e34ceeb3f84aaf75d4"} Dec 13 03:30:53 crc kubenswrapper[5070]: I1213 03:30:53.123359 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-aa68-account-create-77x5t" event={"ID":"deabd048-0a16-41dc-870a-bd5c5594b612","Type":"ContainerStarted","Data":"481fb45593a08d75c93eca58e2f67c1f937c55821327794b893229410f709e21"} Dec 13 03:30:53 crc kubenswrapper[5070]: I1213 03:30:53.125521 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.578321404 podStartE2EDuration="6.125508503s" podCreationTimestamp="2025-12-13 03:30:47 +0000 UTC" firstStartedPulling="2025-12-13 03:30:48.263966189 +0000 UTC m=+1140.499809735" lastFinishedPulling="2025-12-13 03:30:51.811153278 +0000 UTC m=+1144.046996834" observedRunningTime="2025-12-13 03:30:53.122352037 +0000 UTC m=+1145.358195583" watchObservedRunningTime="2025-12-13 03:30:53.125508503 +0000 UTC m=+1145.361352049" Dec 13 03:30:53 crc kubenswrapper[5070]: I1213 03:30:53.134965 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-58db5546cc-vk92n" event={"ID":"91cb6c81-3062-4ff8-a34e-50a21bc217c0","Type":"ContainerStarted","Data":"a1e6f1e0209ca6b09ea73728cd4961d1dbf603751e48ec9f87190fb0d2904a0f"} Dec 13 03:30:53 crc kubenswrapper[5070]: I1213 03:30:53.144031 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"75d3764c-455c-4b0d-98cd-ceaaee77b19e","Type":"ContainerStarted","Data":"592f7dc16d79f97fc0f7dc7e5822d7165bbf26093d6f5e8f2326830a2abaf5f9"} Dec 13 03:30:53 crc kubenswrapper[5070]: I1213 03:30:53.153020 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-869f779d85-6nfwl" event={"ID":"36d51933-fda4-48e7-a109-f77461bd01b6","Type":"ContainerDied","Data":"7192485de5f227e5ee94923d2e8992640e72600608cd96d1ef6993a96fc8883d"} Dec 13 03:30:53 crc kubenswrapper[5070]: I1213 03:30:53.153079 5070 scope.go:117] "RemoveContainer" containerID="06e964c99b3fd821858762a7911e280833d507f457e211d28901d8d711e81cd8" Dec 13 03:30:53 crc kubenswrapper[5070]: I1213 03:30:53.153267 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-869f779d85-6nfwl" Dec 13 03:30:53 crc kubenswrapper[5070]: I1213 03:30:53.254318 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-869f779d85-6nfwl"] Dec 13 03:30:53 crc kubenswrapper[5070]: I1213 03:30:53.286845 5070 scope.go:117] "RemoveContainer" containerID="66e69d857c3230ac50ae71752587513bcdce67e797ddfd6d648f86614530bf7b" Dec 13 03:30:53 crc kubenswrapper[5070]: I1213 03:30:53.288545 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-869f779d85-6nfwl"] Dec 13 03:30:53 crc kubenswrapper[5070]: I1213 03:30:53.600086 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Dec 13 03:30:54 crc kubenswrapper[5070]: I1213 03:30:54.203808 5070 generic.go:334] "Generic (PLEG): container finished" podID="deabd048-0a16-41dc-870a-bd5c5594b612" containerID="9f6fe4c0efc92637e10e236487fa1ece566a9be43bc770441d7e4b7e61a38094" exitCode=0 Dec 13 03:30:54 crc kubenswrapper[5070]: I1213 03:30:54.203890 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-aa68-account-create-77x5t" event={"ID":"deabd048-0a16-41dc-870a-bd5c5594b612","Type":"ContainerDied","Data":"9f6fe4c0efc92637e10e236487fa1ece566a9be43bc770441d7e4b7e61a38094"} Dec 13 03:30:54 crc kubenswrapper[5070]: I1213 03:30:54.221378 5070 generic.go:334] "Generic (PLEG): container finished" podID="91cb6c81-3062-4ff8-a34e-50a21bc217c0" containerID="a3cb01547ffaeb6d218f14015531c2cce8f36a31801d9800d4b94c131a5f8570" exitCode=0 Dec 13 03:30:54 crc kubenswrapper[5070]: I1213 03:30:54.275066 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="36d51933-fda4-48e7-a109-f77461bd01b6" path="/var/lib/kubelet/pods/36d51933-fda4-48e7-a109-f77461bd01b6/volumes" Dec 13 03:30:54 crc kubenswrapper[5070]: I1213 03:30:54.279594 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-58db5546cc-vk92n" event={"ID":"91cb6c81-3062-4ff8-a34e-50a21bc217c0","Type":"ContainerDied","Data":"a3cb01547ffaeb6d218f14015531c2cce8f36a31801d9800d4b94c131a5f8570"} Dec 13 03:30:54 crc kubenswrapper[5070]: I1213 03:30:54.280084 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"75d3764c-455c-4b0d-98cd-ceaaee77b19e","Type":"ContainerStarted","Data":"f299967b97664e422949f325946ae0e686d59c0a32ef43217c14515bbf464f83"} Dec 13 03:30:54 crc kubenswrapper[5070]: I1213 03:30:54.313536 5070 generic.go:334] "Generic (PLEG): container finished" podID="4dad639a-f1b7-4430-b213-78603c9d16aa" containerID="8200cea10084f353b27094e266302eecdc4a81b4435011eb8bb9150a4c2c9e7e" exitCode=0 Dec 13 03:30:54 crc kubenswrapper[5070]: I1213 03:30:54.313564 5070 generic.go:334] "Generic (PLEG): container finished" podID="4dad639a-f1b7-4430-b213-78603c9d16aa" containerID="d429d87d122c54a47a3ea93e683f2170f0014e80d514b4ecae48bc796f40f541" exitCode=2 Dec 13 03:30:54 crc kubenswrapper[5070]: I1213 03:30:54.313572 5070 generic.go:334] "Generic (PLEG): container finished" podID="4dad639a-f1b7-4430-b213-78603c9d16aa" containerID="c17487b04cbbf4a94d1ba0a99a49413d18e7190ff62ee7d32039b9c4e206bff5" exitCode=0 Dec 13 03:30:54 crc kubenswrapper[5070]: I1213 03:30:54.313578 5070 generic.go:334] "Generic (PLEG): container finished" podID="4dad639a-f1b7-4430-b213-78603c9d16aa" containerID="1e6772ff0a35c3ba899751d41299453744bc0fbd630e218fb1b17b3cd9281a0e" exitCode=0 Dec 13 03:30:54 crc kubenswrapper[5070]: I1213 03:30:54.313600 5070 kubelet.go:2453] "SyncLoop (PLEG): event for 
pod" pod="openstack/ceilometer-0" event={"ID":"4dad639a-f1b7-4430-b213-78603c9d16aa","Type":"ContainerDied","Data":"8200cea10084f353b27094e266302eecdc4a81b4435011eb8bb9150a4c2c9e7e"} Dec 13 03:30:54 crc kubenswrapper[5070]: I1213 03:30:54.313626 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4dad639a-f1b7-4430-b213-78603c9d16aa","Type":"ContainerDied","Data":"d429d87d122c54a47a3ea93e683f2170f0014e80d514b4ecae48bc796f40f541"} Dec 13 03:30:54 crc kubenswrapper[5070]: I1213 03:30:54.313638 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4dad639a-f1b7-4430-b213-78603c9d16aa","Type":"ContainerDied","Data":"c17487b04cbbf4a94d1ba0a99a49413d18e7190ff62ee7d32039b9c4e206bff5"} Dec 13 03:30:54 crc kubenswrapper[5070]: I1213 03:30:54.313647 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4dad639a-f1b7-4430-b213-78603c9d16aa","Type":"ContainerDied","Data":"1e6772ff0a35c3ba899751d41299453744bc0fbd630e218fb1b17b3cd9281a0e"} Dec 13 03:30:54 crc kubenswrapper[5070]: I1213 03:30:54.614043 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 13 03:30:54 crc kubenswrapper[5070]: I1213 03:30:54.694428 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4dad639a-f1b7-4430-b213-78603c9d16aa-log-httpd\") pod \"4dad639a-f1b7-4430-b213-78603c9d16aa\" (UID: \"4dad639a-f1b7-4430-b213-78603c9d16aa\") " Dec 13 03:30:54 crc kubenswrapper[5070]: I1213 03:30:54.694561 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4dad639a-f1b7-4430-b213-78603c9d16aa-config-data\") pod \"4dad639a-f1b7-4430-b213-78603c9d16aa\" (UID: \"4dad639a-f1b7-4430-b213-78603c9d16aa\") " Dec 13 03:30:54 crc kubenswrapper[5070]: I1213 03:30:54.694625 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zghr9\" (UniqueName: \"kubernetes.io/projected/4dad639a-f1b7-4430-b213-78603c9d16aa-kube-api-access-zghr9\") pod \"4dad639a-f1b7-4430-b213-78603c9d16aa\" (UID: \"4dad639a-f1b7-4430-b213-78603c9d16aa\") " Dec 13 03:30:54 crc kubenswrapper[5070]: I1213 03:30:54.694671 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4dad639a-f1b7-4430-b213-78603c9d16aa-combined-ca-bundle\") pod \"4dad639a-f1b7-4430-b213-78603c9d16aa\" (UID: \"4dad639a-f1b7-4430-b213-78603c9d16aa\") " Dec 13 03:30:54 crc kubenswrapper[5070]: I1213 03:30:54.694716 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/4dad639a-f1b7-4430-b213-78603c9d16aa-sg-core-conf-yaml\") pod \"4dad639a-f1b7-4430-b213-78603c9d16aa\" (UID: \"4dad639a-f1b7-4430-b213-78603c9d16aa\") " Dec 13 03:30:54 crc kubenswrapper[5070]: I1213 03:30:54.694762 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4dad639a-f1b7-4430-b213-78603c9d16aa-scripts\") pod \"4dad639a-f1b7-4430-b213-78603c9d16aa\" (UID: \"4dad639a-f1b7-4430-b213-78603c9d16aa\") " Dec 13 03:30:54 crc kubenswrapper[5070]: I1213 03:30:54.694842 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/4dad639a-f1b7-4430-b213-78603c9d16aa-run-httpd\") pod \"4dad639a-f1b7-4430-b213-78603c9d16aa\" (UID: \"4dad639a-f1b7-4430-b213-78603c9d16aa\") " Dec 13 03:30:54 crc kubenswrapper[5070]: I1213 03:30:54.695739 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4dad639a-f1b7-4430-b213-78603c9d16aa-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "4dad639a-f1b7-4430-b213-78603c9d16aa" (UID: "4dad639a-f1b7-4430-b213-78603c9d16aa"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 03:30:54 crc kubenswrapper[5070]: I1213 03:30:54.696075 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4dad639a-f1b7-4430-b213-78603c9d16aa-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "4dad639a-f1b7-4430-b213-78603c9d16aa" (UID: "4dad639a-f1b7-4430-b213-78603c9d16aa"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 03:30:54 crc kubenswrapper[5070]: I1213 03:30:54.703947 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4dad639a-f1b7-4430-b213-78603c9d16aa-scripts" (OuterVolumeSpecName: "scripts") pod "4dad639a-f1b7-4430-b213-78603c9d16aa" (UID: "4dad639a-f1b7-4430-b213-78603c9d16aa"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:30:54 crc kubenswrapper[5070]: I1213 03:30:54.706379 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4dad639a-f1b7-4430-b213-78603c9d16aa-kube-api-access-zghr9" (OuterVolumeSpecName: "kube-api-access-zghr9") pod "4dad639a-f1b7-4430-b213-78603c9d16aa" (UID: "4dad639a-f1b7-4430-b213-78603c9d16aa"). InnerVolumeSpecName "kube-api-access-zghr9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:30:54 crc kubenswrapper[5070]: I1213 03:30:54.737963 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4dad639a-f1b7-4430-b213-78603c9d16aa-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "4dad639a-f1b7-4430-b213-78603c9d16aa" (UID: "4dad639a-f1b7-4430-b213-78603c9d16aa"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:30:54 crc kubenswrapper[5070]: I1213 03:30:54.800012 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zghr9\" (UniqueName: \"kubernetes.io/projected/4dad639a-f1b7-4430-b213-78603c9d16aa-kube-api-access-zghr9\") on node \"crc\" DevicePath \"\"" Dec 13 03:30:54 crc kubenswrapper[5070]: I1213 03:30:54.800364 5070 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/4dad639a-f1b7-4430-b213-78603c9d16aa-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 13 03:30:54 crc kubenswrapper[5070]: I1213 03:30:54.800380 5070 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4dad639a-f1b7-4430-b213-78603c9d16aa-scripts\") on node \"crc\" DevicePath \"\"" Dec 13 03:30:54 crc kubenswrapper[5070]: I1213 03:30:54.800390 5070 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4dad639a-f1b7-4430-b213-78603c9d16aa-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 13 03:30:54 crc kubenswrapper[5070]: I1213 03:30:54.800459 5070 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4dad639a-f1b7-4430-b213-78603c9d16aa-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 13 03:30:54 crc kubenswrapper[5070]: I1213 03:30:54.828126 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4dad639a-f1b7-4430-b213-78603c9d16aa-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4dad639a-f1b7-4430-b213-78603c9d16aa" (UID: "4dad639a-f1b7-4430-b213-78603c9d16aa"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:30:54 crc kubenswrapper[5070]: I1213 03:30:54.859737 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4dad639a-f1b7-4430-b213-78603c9d16aa-config-data" (OuterVolumeSpecName: "config-data") pod "4dad639a-f1b7-4430-b213-78603c9d16aa" (UID: "4dad639a-f1b7-4430-b213-78603c9d16aa"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:30:54 crc kubenswrapper[5070]: I1213 03:30:54.902653 5070 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4dad639a-f1b7-4430-b213-78603c9d16aa-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 03:30:54 crc kubenswrapper[5070]: I1213 03:30:54.902673 5070 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4dad639a-f1b7-4430-b213-78603c9d16aa-config-data\") on node \"crc\" DevicePath \"\"" Dec 13 03:30:55 crc kubenswrapper[5070]: I1213 03:30:55.353742 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"75d3764c-455c-4b0d-98cd-ceaaee77b19e","Type":"ContainerStarted","Data":"aa1b59cb73538e8649000482ba6681b2dfb8f6ef664a9faea61b115915d81881"} Dec 13 03:30:55 crc kubenswrapper[5070]: I1213 03:30:55.354134 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Dec 13 03:30:55 crc kubenswrapper[5070]: I1213 03:30:55.354133 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="75d3764c-455c-4b0d-98cd-ceaaee77b19e" containerName="cinder-api-log" containerID="cri-o://f299967b97664e422949f325946ae0e686d59c0a32ef43217c14515bbf464f83" gracePeriod=30 Dec 13 03:30:55 crc kubenswrapper[5070]: I1213 03:30:55.354278 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="75d3764c-455c-4b0d-98cd-ceaaee77b19e" containerName="cinder-api" containerID="cri-o://aa1b59cb73538e8649000482ba6681b2dfb8f6ef664a9faea61b115915d81881" gracePeriod=30 Dec 13 03:30:55 crc kubenswrapper[5070]: I1213 03:30:55.376369 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=4.376351129 podStartE2EDuration="4.376351129s" podCreationTimestamp="2025-12-13 03:30:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 03:30:55.376107482 +0000 UTC m=+1147.611951038" watchObservedRunningTime="2025-12-13 03:30:55.376351129 +0000 UTC m=+1147.612194675" Dec 13 03:30:55 crc kubenswrapper[5070]: I1213 03:30:55.380270 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4dad639a-f1b7-4430-b213-78603c9d16aa","Type":"ContainerDied","Data":"03b341da32f7abd2a97154d1bc9fadc835fd6b616bcdf42d433c7d12fa11db29"} Dec 13 03:30:55 crc kubenswrapper[5070]: I1213 03:30:55.380485 5070 scope.go:117] "RemoveContainer" containerID="8200cea10084f353b27094e266302eecdc4a81b4435011eb8bb9150a4c2c9e7e" Dec 13 03:30:55 crc kubenswrapper[5070]: I1213 03:30:55.380295 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 13 03:30:55 crc kubenswrapper[5070]: I1213 03:30:55.384948 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"4b4c78b2-a38d-476f-b88a-089e89d92fef","Type":"ContainerStarted","Data":"fda161a35f795c76afb7c65f4274c50e905d564b116fd577ffd4546f835f8039"} Dec 13 03:30:55 crc kubenswrapper[5070]: I1213 03:30:55.391502 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-58db5546cc-vk92n" event={"ID":"91cb6c81-3062-4ff8-a34e-50a21bc217c0","Type":"ContainerStarted","Data":"fa7f9c76a143cdbe28e14ead44fbf2f58d25ffec447929984f0388216f943980"} Dec 13 03:30:55 crc kubenswrapper[5070]: I1213 03:30:55.391834 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-58db5546cc-vk92n" Dec 13 03:30:55 crc kubenswrapper[5070]: I1213 03:30:55.424509 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-58db5546cc-vk92n" podStartSLOduration=4.424491475 podStartE2EDuration="4.424491475s" podCreationTimestamp="2025-12-13 03:30:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 03:30:55.411113149 +0000 UTC m=+1147.646956695" watchObservedRunningTime="2025-12-13 03:30:55.424491475 +0000 UTC m=+1147.660335021" Dec 13 03:30:55 crc kubenswrapper[5070]: I1213 03:30:55.441134 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 13 03:30:55 crc kubenswrapper[5070]: I1213 03:30:55.459404 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 13 03:30:55 crc kubenswrapper[5070]: I1213 03:30:55.530242 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 13 03:30:55 crc kubenswrapper[5070]: E1213 03:30:55.530680 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="36d51933-fda4-48e7-a109-f77461bd01b6" containerName="init" Dec 13 03:30:55 crc kubenswrapper[5070]: I1213 03:30:55.530696 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="36d51933-fda4-48e7-a109-f77461bd01b6" containerName="init" Dec 13 03:30:55 crc kubenswrapper[5070]: E1213 03:30:55.530708 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="36d51933-fda4-48e7-a109-f77461bd01b6" containerName="dnsmasq-dns" Dec 13 03:30:55 crc kubenswrapper[5070]: I1213 03:30:55.530716 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="36d51933-fda4-48e7-a109-f77461bd01b6" containerName="dnsmasq-dns" Dec 13 03:30:55 crc kubenswrapper[5070]: E1213 03:30:55.530735 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4dad639a-f1b7-4430-b213-78603c9d16aa" containerName="sg-core" Dec 13 03:30:55 crc kubenswrapper[5070]: I1213 03:30:55.530741 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="4dad639a-f1b7-4430-b213-78603c9d16aa" containerName="sg-core" Dec 13 03:30:55 crc kubenswrapper[5070]: E1213 03:30:55.530752 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4dad639a-f1b7-4430-b213-78603c9d16aa" containerName="ceilometer-central-agent" Dec 13 03:30:55 crc kubenswrapper[5070]: I1213 03:30:55.530758 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="4dad639a-f1b7-4430-b213-78603c9d16aa" containerName="ceilometer-central-agent" Dec 13 03:30:55 crc kubenswrapper[5070]: E1213 03:30:55.530768 5070 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="4dad639a-f1b7-4430-b213-78603c9d16aa" containerName="proxy-httpd" Dec 13 03:30:55 crc kubenswrapper[5070]: I1213 03:30:55.530774 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="4dad639a-f1b7-4430-b213-78603c9d16aa" containerName="proxy-httpd" Dec 13 03:30:55 crc kubenswrapper[5070]: E1213 03:30:55.530783 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4dad639a-f1b7-4430-b213-78603c9d16aa" containerName="ceilometer-notification-agent" Dec 13 03:30:55 crc kubenswrapper[5070]: I1213 03:30:55.530789 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="4dad639a-f1b7-4430-b213-78603c9d16aa" containerName="ceilometer-notification-agent" Dec 13 03:30:55 crc kubenswrapper[5070]: I1213 03:30:55.530953 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="4dad639a-f1b7-4430-b213-78603c9d16aa" containerName="sg-core" Dec 13 03:30:55 crc kubenswrapper[5070]: I1213 03:30:55.530966 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="4dad639a-f1b7-4430-b213-78603c9d16aa" containerName="ceilometer-notification-agent" Dec 13 03:30:55 crc kubenswrapper[5070]: I1213 03:30:55.530978 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="36d51933-fda4-48e7-a109-f77461bd01b6" containerName="dnsmasq-dns" Dec 13 03:30:55 crc kubenswrapper[5070]: I1213 03:30:55.530988 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="4dad639a-f1b7-4430-b213-78603c9d16aa" containerName="ceilometer-central-agent" Dec 13 03:30:55 crc kubenswrapper[5070]: I1213 03:30:55.531008 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="4dad639a-f1b7-4430-b213-78603c9d16aa" containerName="proxy-httpd" Dec 13 03:30:55 crc kubenswrapper[5070]: I1213 03:30:55.532695 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 13 03:30:55 crc kubenswrapper[5070]: I1213 03:30:55.536632 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 13 03:30:55 crc kubenswrapper[5070]: I1213 03:30:55.537040 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 13 03:30:55 crc kubenswrapper[5070]: I1213 03:30:55.538865 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 13 03:30:55 crc kubenswrapper[5070]: I1213 03:30:55.552923 5070 scope.go:117] "RemoveContainer" containerID="d429d87d122c54a47a3ea93e683f2170f0014e80d514b4ecae48bc796f40f541" Dec 13 03:30:55 crc kubenswrapper[5070]: I1213 03:30:55.623805 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-55d578b79b-csd56" Dec 13 03:30:55 crc kubenswrapper[5070]: I1213 03:30:55.631883 5070 scope.go:117] "RemoveContainer" containerID="c17487b04cbbf4a94d1ba0a99a49413d18e7190ff62ee7d32039b9c4e206bff5" Dec 13 03:30:55 crc kubenswrapper[5070]: I1213 03:30:55.635544 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3892d787-b7e9-4f06-b629-7c30653b7a43-config-data\") pod \"ceilometer-0\" (UID: \"3892d787-b7e9-4f06-b629-7c30653b7a43\") " pod="openstack/ceilometer-0" Dec 13 03:30:55 crc kubenswrapper[5070]: I1213 03:30:55.635610 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3892d787-b7e9-4f06-b629-7c30653b7a43-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"3892d787-b7e9-4f06-b629-7c30653b7a43\") " pod="openstack/ceilometer-0" Dec 13 03:30:55 crc kubenswrapper[5070]: I1213 03:30:55.635687 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3892d787-b7e9-4f06-b629-7c30653b7a43-run-httpd\") pod \"ceilometer-0\" (UID: \"3892d787-b7e9-4f06-b629-7c30653b7a43\") " pod="openstack/ceilometer-0" Dec 13 03:30:55 crc kubenswrapper[5070]: I1213 03:30:55.636339 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3892d787-b7e9-4f06-b629-7c30653b7a43-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"3892d787-b7e9-4f06-b629-7c30653b7a43\") " pod="openstack/ceilometer-0" Dec 13 03:30:55 crc kubenswrapper[5070]: I1213 03:30:55.636386 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3892d787-b7e9-4f06-b629-7c30653b7a43-scripts\") pod \"ceilometer-0\" (UID: \"3892d787-b7e9-4f06-b629-7c30653b7a43\") " pod="openstack/ceilometer-0" Dec 13 03:30:55 crc kubenswrapper[5070]: I1213 03:30:55.636506 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3892d787-b7e9-4f06-b629-7c30653b7a43-log-httpd\") pod \"ceilometer-0\" (UID: \"3892d787-b7e9-4f06-b629-7c30653b7a43\") " pod="openstack/ceilometer-0" Dec 13 03:30:55 crc kubenswrapper[5070]: I1213 03:30:55.636563 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vhn6z\" (UniqueName: 
\"kubernetes.io/projected/3892d787-b7e9-4f06-b629-7c30653b7a43-kube-api-access-vhn6z\") pod \"ceilometer-0\" (UID: \"3892d787-b7e9-4f06-b629-7c30653b7a43\") " pod="openstack/ceilometer-0" Dec 13 03:30:55 crc kubenswrapper[5070]: I1213 03:30:55.665730 5070 scope.go:117] "RemoveContainer" containerID="1e6772ff0a35c3ba899751d41299453744bc0fbd630e218fb1b17b3cd9281a0e" Dec 13 03:30:55 crc kubenswrapper[5070]: I1213 03:30:55.690759 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-55d578b79b-csd56" Dec 13 03:30:55 crc kubenswrapper[5070]: I1213 03:30:55.744573 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3892d787-b7e9-4f06-b629-7c30653b7a43-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"3892d787-b7e9-4f06-b629-7c30653b7a43\") " pod="openstack/ceilometer-0" Dec 13 03:30:55 crc kubenswrapper[5070]: I1213 03:30:55.745789 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3892d787-b7e9-4f06-b629-7c30653b7a43-scripts\") pod \"ceilometer-0\" (UID: \"3892d787-b7e9-4f06-b629-7c30653b7a43\") " pod="openstack/ceilometer-0" Dec 13 03:30:55 crc kubenswrapper[5070]: I1213 03:30:55.745853 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3892d787-b7e9-4f06-b629-7c30653b7a43-log-httpd\") pod \"ceilometer-0\" (UID: \"3892d787-b7e9-4f06-b629-7c30653b7a43\") " pod="openstack/ceilometer-0" Dec 13 03:30:55 crc kubenswrapper[5070]: I1213 03:30:55.745892 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vhn6z\" (UniqueName: \"kubernetes.io/projected/3892d787-b7e9-4f06-b629-7c30653b7a43-kube-api-access-vhn6z\") pod \"ceilometer-0\" (UID: \"3892d787-b7e9-4f06-b629-7c30653b7a43\") " pod="openstack/ceilometer-0" Dec 13 03:30:55 crc kubenswrapper[5070]: I1213 03:30:55.745964 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3892d787-b7e9-4f06-b629-7c30653b7a43-config-data\") pod \"ceilometer-0\" (UID: \"3892d787-b7e9-4f06-b629-7c30653b7a43\") " pod="openstack/ceilometer-0" Dec 13 03:30:55 crc kubenswrapper[5070]: I1213 03:30:55.745988 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3892d787-b7e9-4f06-b629-7c30653b7a43-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"3892d787-b7e9-4f06-b629-7c30653b7a43\") " pod="openstack/ceilometer-0" Dec 13 03:30:55 crc kubenswrapper[5070]: I1213 03:30:55.746083 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3892d787-b7e9-4f06-b629-7c30653b7a43-run-httpd\") pod \"ceilometer-0\" (UID: \"3892d787-b7e9-4f06-b629-7c30653b7a43\") " pod="openstack/ceilometer-0" Dec 13 03:30:55 crc kubenswrapper[5070]: I1213 03:30:55.747050 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3892d787-b7e9-4f06-b629-7c30653b7a43-run-httpd\") pod \"ceilometer-0\" (UID: \"3892d787-b7e9-4f06-b629-7c30653b7a43\") " pod="openstack/ceilometer-0" Dec 13 03:30:55 crc kubenswrapper[5070]: I1213 03:30:55.747516 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/3892d787-b7e9-4f06-b629-7c30653b7a43-log-httpd\") pod \"ceilometer-0\" (UID: \"3892d787-b7e9-4f06-b629-7c30653b7a43\") " pod="openstack/ceilometer-0" Dec 13 03:30:55 crc kubenswrapper[5070]: I1213 03:30:55.762254 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3892d787-b7e9-4f06-b629-7c30653b7a43-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"3892d787-b7e9-4f06-b629-7c30653b7a43\") " pod="openstack/ceilometer-0" Dec 13 03:30:55 crc kubenswrapper[5070]: I1213 03:30:55.764356 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3892d787-b7e9-4f06-b629-7c30653b7a43-scripts\") pod \"ceilometer-0\" (UID: \"3892d787-b7e9-4f06-b629-7c30653b7a43\") " pod="openstack/ceilometer-0" Dec 13 03:30:55 crc kubenswrapper[5070]: I1213 03:30:55.768141 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3892d787-b7e9-4f06-b629-7c30653b7a43-config-data\") pod \"ceilometer-0\" (UID: \"3892d787-b7e9-4f06-b629-7c30653b7a43\") " pod="openstack/ceilometer-0" Dec 13 03:30:55 crc kubenswrapper[5070]: I1213 03:30:55.781604 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3892d787-b7e9-4f06-b629-7c30653b7a43-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"3892d787-b7e9-4f06-b629-7c30653b7a43\") " pod="openstack/ceilometer-0" Dec 13 03:30:55 crc kubenswrapper[5070]: I1213 03:30:55.809295 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vhn6z\" (UniqueName: \"kubernetes.io/projected/3892d787-b7e9-4f06-b629-7c30653b7a43-kube-api-access-vhn6z\") pod \"ceilometer-0\" (UID: \"3892d787-b7e9-4f06-b629-7c30653b7a43\") " pod="openstack/ceilometer-0" Dec 13 03:30:55 crc kubenswrapper[5070]: I1213 03:30:55.939495 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 13 03:30:55 crc kubenswrapper[5070]: I1213 03:30:55.982682 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-aa68-account-create-77x5t" Dec 13 03:30:56 crc kubenswrapper[5070]: I1213 03:30:56.055135 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf227\" (UniqueName: \"kubernetes.io/projected/deabd048-0a16-41dc-870a-bd5c5594b612-kube-api-access-bf227\") pod \"deabd048-0a16-41dc-870a-bd5c5594b612\" (UID: \"deabd048-0a16-41dc-870a-bd5c5594b612\") " Dec 13 03:30:56 crc kubenswrapper[5070]: I1213 03:30:56.067928 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/deabd048-0a16-41dc-870a-bd5c5594b612-kube-api-access-bf227" (OuterVolumeSpecName: "kube-api-access-bf227") pod "deabd048-0a16-41dc-870a-bd5c5594b612" (UID: "deabd048-0a16-41dc-870a-bd5c5594b612"). InnerVolumeSpecName "kube-api-access-bf227". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:30:56 crc kubenswrapper[5070]: I1213 03:30:56.157206 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf227\" (UniqueName: \"kubernetes.io/projected/deabd048-0a16-41dc-870a-bd5c5594b612-kube-api-access-bf227\") on node \"crc\" DevicePath \"\"" Dec 13 03:30:56 crc kubenswrapper[5070]: I1213 03:30:56.185289 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4dad639a-f1b7-4430-b213-78603c9d16aa" path="/var/lib/kubelet/pods/4dad639a-f1b7-4430-b213-78603c9d16aa/volumes" Dec 13 03:30:56 crc kubenswrapper[5070]: I1213 03:30:56.222798 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 13 03:30:56 crc kubenswrapper[5070]: I1213 03:30:56.360226 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/75d3764c-455c-4b0d-98cd-ceaaee77b19e-config-data\") pod \"75d3764c-455c-4b0d-98cd-ceaaee77b19e\" (UID: \"75d3764c-455c-4b0d-98cd-ceaaee77b19e\") " Dec 13 03:30:56 crc kubenswrapper[5070]: I1213 03:30:56.360312 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/75d3764c-455c-4b0d-98cd-ceaaee77b19e-logs\") pod \"75d3764c-455c-4b0d-98cd-ceaaee77b19e\" (UID: \"75d3764c-455c-4b0d-98cd-ceaaee77b19e\") " Dec 13 03:30:56 crc kubenswrapper[5070]: I1213 03:30:56.360368 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/75d3764c-455c-4b0d-98cd-ceaaee77b19e-config-data-custom\") pod \"75d3764c-455c-4b0d-98cd-ceaaee77b19e\" (UID: \"75d3764c-455c-4b0d-98cd-ceaaee77b19e\") " Dec 13 03:30:56 crc kubenswrapper[5070]: I1213 03:30:56.360615 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z7sfb\" (UniqueName: \"kubernetes.io/projected/75d3764c-455c-4b0d-98cd-ceaaee77b19e-kube-api-access-z7sfb\") pod \"75d3764c-455c-4b0d-98cd-ceaaee77b19e\" (UID: \"75d3764c-455c-4b0d-98cd-ceaaee77b19e\") " Dec 13 03:30:56 crc kubenswrapper[5070]: I1213 03:30:56.360643 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/75d3764c-455c-4b0d-98cd-ceaaee77b19e-etc-machine-id\") pod \"75d3764c-455c-4b0d-98cd-ceaaee77b19e\" (UID: \"75d3764c-455c-4b0d-98cd-ceaaee77b19e\") " Dec 13 03:30:56 crc kubenswrapper[5070]: I1213 03:30:56.360686 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/75d3764c-455c-4b0d-98cd-ceaaee77b19e-scripts\") pod \"75d3764c-455c-4b0d-98cd-ceaaee77b19e\" (UID: \"75d3764c-455c-4b0d-98cd-ceaaee77b19e\") " Dec 13 03:30:56 crc kubenswrapper[5070]: I1213 03:30:56.360718 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/75d3764c-455c-4b0d-98cd-ceaaee77b19e-combined-ca-bundle\") pod \"75d3764c-455c-4b0d-98cd-ceaaee77b19e\" (UID: \"75d3764c-455c-4b0d-98cd-ceaaee77b19e\") " Dec 13 03:30:56 crc kubenswrapper[5070]: I1213 03:30:56.360900 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/75d3764c-455c-4b0d-98cd-ceaaee77b19e-logs" (OuterVolumeSpecName: "logs") pod "75d3764c-455c-4b0d-98cd-ceaaee77b19e" (UID: "75d3764c-455c-4b0d-98cd-ceaaee77b19e"). 
InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 03:30:56 crc kubenswrapper[5070]: I1213 03:30:56.361098 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/75d3764c-455c-4b0d-98cd-ceaaee77b19e-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "75d3764c-455c-4b0d-98cd-ceaaee77b19e" (UID: "75d3764c-455c-4b0d-98cd-ceaaee77b19e"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 13 03:30:56 crc kubenswrapper[5070]: I1213 03:30:56.367884 5070 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/75d3764c-455c-4b0d-98cd-ceaaee77b19e-logs\") on node \"crc\" DevicePath \"\"" Dec 13 03:30:56 crc kubenswrapper[5070]: I1213 03:30:56.367914 5070 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/75d3764c-455c-4b0d-98cd-ceaaee77b19e-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 13 03:30:56 crc kubenswrapper[5070]: I1213 03:30:56.380036 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/75d3764c-455c-4b0d-98cd-ceaaee77b19e-scripts" (OuterVolumeSpecName: "scripts") pod "75d3764c-455c-4b0d-98cd-ceaaee77b19e" (UID: "75d3764c-455c-4b0d-98cd-ceaaee77b19e"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:30:56 crc kubenswrapper[5070]: I1213 03:30:56.380982 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/75d3764c-455c-4b0d-98cd-ceaaee77b19e-kube-api-access-z7sfb" (OuterVolumeSpecName: "kube-api-access-z7sfb") pod "75d3764c-455c-4b0d-98cd-ceaaee77b19e" (UID: "75d3764c-455c-4b0d-98cd-ceaaee77b19e"). InnerVolumeSpecName "kube-api-access-z7sfb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:30:56 crc kubenswrapper[5070]: I1213 03:30:56.384601 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/75d3764c-455c-4b0d-98cd-ceaaee77b19e-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "75d3764c-455c-4b0d-98cd-ceaaee77b19e" (UID: "75d3764c-455c-4b0d-98cd-ceaaee77b19e"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:30:56 crc kubenswrapper[5070]: I1213 03:30:56.401521 5070 generic.go:334] "Generic (PLEG): container finished" podID="75d3764c-455c-4b0d-98cd-ceaaee77b19e" containerID="aa1b59cb73538e8649000482ba6681b2dfb8f6ef664a9faea61b115915d81881" exitCode=0 Dec 13 03:30:56 crc kubenswrapper[5070]: I1213 03:30:56.401848 5070 generic.go:334] "Generic (PLEG): container finished" podID="75d3764c-455c-4b0d-98cd-ceaaee77b19e" containerID="f299967b97664e422949f325946ae0e686d59c0a32ef43217c14515bbf464f83" exitCode=143 Dec 13 03:30:56 crc kubenswrapper[5070]: I1213 03:30:56.401588 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Dec 13 03:30:56 crc kubenswrapper[5070]: I1213 03:30:56.401607 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"75d3764c-455c-4b0d-98cd-ceaaee77b19e","Type":"ContainerDied","Data":"aa1b59cb73538e8649000482ba6681b2dfb8f6ef664a9faea61b115915d81881"} Dec 13 03:30:56 crc kubenswrapper[5070]: I1213 03:30:56.402134 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"75d3764c-455c-4b0d-98cd-ceaaee77b19e","Type":"ContainerDied","Data":"f299967b97664e422949f325946ae0e686d59c0a32ef43217c14515bbf464f83"} Dec 13 03:30:56 crc kubenswrapper[5070]: I1213 03:30:56.402151 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"75d3764c-455c-4b0d-98cd-ceaaee77b19e","Type":"ContainerDied","Data":"592f7dc16d79f97fc0f7dc7e5822d7165bbf26093d6f5e8f2326830a2abaf5f9"} Dec 13 03:30:56 crc kubenswrapper[5070]: I1213 03:30:56.402170 5070 scope.go:117] "RemoveContainer" containerID="aa1b59cb73538e8649000482ba6681b2dfb8f6ef664a9faea61b115915d81881" Dec 13 03:30:56 crc kubenswrapper[5070]: I1213 03:30:56.405820 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/75d3764c-455c-4b0d-98cd-ceaaee77b19e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "75d3764c-455c-4b0d-98cd-ceaaee77b19e" (UID: "75d3764c-455c-4b0d-98cd-ceaaee77b19e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:30:56 crc kubenswrapper[5070]: I1213 03:30:56.414158 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"4b4c78b2-a38d-476f-b88a-089e89d92fef","Type":"ContainerStarted","Data":"d8c0b2df5788ed9d47f087efae84629420a6cf1956558ddb519c6b2f971f72f2"} Dec 13 03:30:56 crc kubenswrapper[5070]: I1213 03:30:56.422738 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-aa68-account-create-77x5t" event={"ID":"deabd048-0a16-41dc-870a-bd5c5594b612","Type":"ContainerDied","Data":"481fb45593a08d75c93eca58e2f67c1f937c55821327794b893229410f709e21"} Dec 13 03:30:56 crc kubenswrapper[5070]: I1213 03:30:56.422932 5070 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="481fb45593a08d75c93eca58e2f67c1f937c55821327794b893229410f709e21" Dec 13 03:30:56 crc kubenswrapper[5070]: I1213 03:30:56.422796 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-aa68-account-create-77x5t" Dec 13 03:30:56 crc kubenswrapper[5070]: I1213 03:30:56.426215 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/75d3764c-455c-4b0d-98cd-ceaaee77b19e-config-data" (OuterVolumeSpecName: "config-data") pod "75d3764c-455c-4b0d-98cd-ceaaee77b19e" (UID: "75d3764c-455c-4b0d-98cd-ceaaee77b19e"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:30:56 crc kubenswrapper[5070]: I1213 03:30:56.430191 5070 scope.go:117] "RemoveContainer" containerID="f299967b97664e422949f325946ae0e686d59c0a32ef43217c14515bbf464f83" Dec 13 03:30:56 crc kubenswrapper[5070]: I1213 03:30:56.440783 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=4.277584 podStartE2EDuration="5.440766725s" podCreationTimestamp="2025-12-13 03:30:51 +0000 UTC" firstStartedPulling="2025-12-13 03:30:52.36602411 +0000 UTC m=+1144.601867656" lastFinishedPulling="2025-12-13 03:30:53.529206835 +0000 UTC m=+1145.765050381" observedRunningTime="2025-12-13 03:30:56.432617852 +0000 UTC m=+1148.668461398" watchObservedRunningTime="2025-12-13 03:30:56.440766725 +0000 UTC m=+1148.676610261" Dec 13 03:30:56 crc kubenswrapper[5070]: I1213 03:30:56.465877 5070 scope.go:117] "RemoveContainer" containerID="aa1b59cb73538e8649000482ba6681b2dfb8f6ef664a9faea61b115915d81881" Dec 13 03:30:56 crc kubenswrapper[5070]: E1213 03:30:56.471419 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aa1b59cb73538e8649000482ba6681b2dfb8f6ef664a9faea61b115915d81881\": container with ID starting with aa1b59cb73538e8649000482ba6681b2dfb8f6ef664a9faea61b115915d81881 not found: ID does not exist" containerID="aa1b59cb73538e8649000482ba6681b2dfb8f6ef664a9faea61b115915d81881" Dec 13 03:30:56 crc kubenswrapper[5070]: I1213 03:30:56.475313 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aa1b59cb73538e8649000482ba6681b2dfb8f6ef664a9faea61b115915d81881"} err="failed to get container status \"aa1b59cb73538e8649000482ba6681b2dfb8f6ef664a9faea61b115915d81881\": rpc error: code = NotFound desc = could not find container \"aa1b59cb73538e8649000482ba6681b2dfb8f6ef664a9faea61b115915d81881\": container with ID starting with aa1b59cb73538e8649000482ba6681b2dfb8f6ef664a9faea61b115915d81881 not found: ID does not exist" Dec 13 03:30:56 crc kubenswrapper[5070]: I1213 03:30:56.475568 5070 scope.go:117] "RemoveContainer" containerID="f299967b97664e422949f325946ae0e686d59c0a32ef43217c14515bbf464f83" Dec 13 03:30:56 crc kubenswrapper[5070]: I1213 03:30:56.471832 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z7sfb\" (UniqueName: \"kubernetes.io/projected/75d3764c-455c-4b0d-98cd-ceaaee77b19e-kube-api-access-z7sfb\") on node \"crc\" DevicePath \"\"" Dec 13 03:30:56 crc kubenswrapper[5070]: I1213 03:30:56.475861 5070 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/75d3764c-455c-4b0d-98cd-ceaaee77b19e-scripts\") on node \"crc\" DevicePath \"\"" Dec 13 03:30:56 crc kubenswrapper[5070]: I1213 03:30:56.475930 5070 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/75d3764c-455c-4b0d-98cd-ceaaee77b19e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 03:30:56 crc kubenswrapper[5070]: I1213 03:30:56.476027 5070 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/75d3764c-455c-4b0d-98cd-ceaaee77b19e-config-data\") on node \"crc\" DevicePath \"\"" Dec 13 03:30:56 crc kubenswrapper[5070]: I1213 03:30:56.476144 5070 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: 
\"kubernetes.io/secret/75d3764c-455c-4b0d-98cd-ceaaee77b19e-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 13 03:30:56 crc kubenswrapper[5070]: E1213 03:30:56.484665 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f299967b97664e422949f325946ae0e686d59c0a32ef43217c14515bbf464f83\": container with ID starting with f299967b97664e422949f325946ae0e686d59c0a32ef43217c14515bbf464f83 not found: ID does not exist" containerID="f299967b97664e422949f325946ae0e686d59c0a32ef43217c14515bbf464f83" Dec 13 03:30:56 crc kubenswrapper[5070]: I1213 03:30:56.484832 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f299967b97664e422949f325946ae0e686d59c0a32ef43217c14515bbf464f83"} err="failed to get container status \"f299967b97664e422949f325946ae0e686d59c0a32ef43217c14515bbf464f83\": rpc error: code = NotFound desc = could not find container \"f299967b97664e422949f325946ae0e686d59c0a32ef43217c14515bbf464f83\": container with ID starting with f299967b97664e422949f325946ae0e686d59c0a32ef43217c14515bbf464f83 not found: ID does not exist" Dec 13 03:30:56 crc kubenswrapper[5070]: I1213 03:30:56.484875 5070 scope.go:117] "RemoveContainer" containerID="aa1b59cb73538e8649000482ba6681b2dfb8f6ef664a9faea61b115915d81881" Dec 13 03:30:56 crc kubenswrapper[5070]: I1213 03:30:56.487072 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aa1b59cb73538e8649000482ba6681b2dfb8f6ef664a9faea61b115915d81881"} err="failed to get container status \"aa1b59cb73538e8649000482ba6681b2dfb8f6ef664a9faea61b115915d81881\": rpc error: code = NotFound desc = could not find container \"aa1b59cb73538e8649000482ba6681b2dfb8f6ef664a9faea61b115915d81881\": container with ID starting with aa1b59cb73538e8649000482ba6681b2dfb8f6ef664a9faea61b115915d81881 not found: ID does not exist" Dec 13 03:30:56 crc kubenswrapper[5070]: I1213 03:30:56.487112 5070 scope.go:117] "RemoveContainer" containerID="f299967b97664e422949f325946ae0e686d59c0a32ef43217c14515bbf464f83" Dec 13 03:30:56 crc kubenswrapper[5070]: I1213 03:30:56.495061 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f299967b97664e422949f325946ae0e686d59c0a32ef43217c14515bbf464f83"} err="failed to get container status \"f299967b97664e422949f325946ae0e686d59c0a32ef43217c14515bbf464f83\": rpc error: code = NotFound desc = could not find container \"f299967b97664e422949f325946ae0e686d59c0a32ef43217c14515bbf464f83\": container with ID starting with f299967b97664e422949f325946ae0e686d59c0a32ef43217c14515bbf464f83 not found: ID does not exist" Dec 13 03:30:56 crc kubenswrapper[5070]: I1213 03:30:56.501483 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 13 03:30:56 crc kubenswrapper[5070]: I1213 03:30:56.593874 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Dec 13 03:30:56 crc kubenswrapper[5070]: I1213 03:30:56.767741 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Dec 13 03:30:56 crc kubenswrapper[5070]: I1213 03:30:56.813865 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Dec 13 03:30:56 crc kubenswrapper[5070]: I1213 03:30:56.831526 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Dec 13 03:30:56 crc kubenswrapper[5070]: E1213 03:30:56.831987 5070 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="deabd048-0a16-41dc-870a-bd5c5594b612" containerName="mariadb-account-create" Dec 13 03:30:56 crc kubenswrapper[5070]: I1213 03:30:56.832012 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="deabd048-0a16-41dc-870a-bd5c5594b612" containerName="mariadb-account-create" Dec 13 03:30:56 crc kubenswrapper[5070]: E1213 03:30:56.832053 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="75d3764c-455c-4b0d-98cd-ceaaee77b19e" containerName="cinder-api-log" Dec 13 03:30:56 crc kubenswrapper[5070]: I1213 03:30:56.832062 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="75d3764c-455c-4b0d-98cd-ceaaee77b19e" containerName="cinder-api-log" Dec 13 03:30:56 crc kubenswrapper[5070]: E1213 03:30:56.832074 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="75d3764c-455c-4b0d-98cd-ceaaee77b19e" containerName="cinder-api" Dec 13 03:30:56 crc kubenswrapper[5070]: I1213 03:30:56.832081 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="75d3764c-455c-4b0d-98cd-ceaaee77b19e" containerName="cinder-api" Dec 13 03:30:56 crc kubenswrapper[5070]: I1213 03:30:56.832298 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="75d3764c-455c-4b0d-98cd-ceaaee77b19e" containerName="cinder-api" Dec 13 03:30:56 crc kubenswrapper[5070]: I1213 03:30:56.832315 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="deabd048-0a16-41dc-870a-bd5c5594b612" containerName="mariadb-account-create" Dec 13 03:30:56 crc kubenswrapper[5070]: I1213 03:30:56.832335 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="75d3764c-455c-4b0d-98cd-ceaaee77b19e" containerName="cinder-api-log" Dec 13 03:30:56 crc kubenswrapper[5070]: I1213 03:30:56.833509 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Dec 13 03:30:56 crc kubenswrapper[5070]: I1213 03:30:56.848024 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-internal-svc" Dec 13 03:30:56 crc kubenswrapper[5070]: I1213 03:30:56.848596 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Dec 13 03:30:56 crc kubenswrapper[5070]: I1213 03:30:56.848828 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-public-svc" Dec 13 03:30:56 crc kubenswrapper[5070]: I1213 03:30:56.854346 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 13 03:30:56 crc kubenswrapper[5070]: I1213 03:30:56.905471 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6d14911c-95e8-4604-af3a-efcea2eb4b73-logs\") pod \"cinder-api-0\" (UID: \"6d14911c-95e8-4604-af3a-efcea2eb4b73\") " pod="openstack/cinder-api-0" Dec 13 03:30:56 crc kubenswrapper[5070]: I1213 03:30:56.905718 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/6d14911c-95e8-4604-af3a-efcea2eb4b73-etc-machine-id\") pod \"cinder-api-0\" (UID: \"6d14911c-95e8-4604-af3a-efcea2eb4b73\") " pod="openstack/cinder-api-0" Dec 13 03:30:56 crc kubenswrapper[5070]: I1213 03:30:56.905974 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pg2tw\" (UniqueName: \"kubernetes.io/projected/6d14911c-95e8-4604-af3a-efcea2eb4b73-kube-api-access-pg2tw\") pod \"cinder-api-0\" (UID: \"6d14911c-95e8-4604-af3a-efcea2eb4b73\") " pod="openstack/cinder-api-0" Dec 13 03:30:56 crc kubenswrapper[5070]: I1213 03:30:56.906081 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6d14911c-95e8-4604-af3a-efcea2eb4b73-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"6d14911c-95e8-4604-af3a-efcea2eb4b73\") " pod="openstack/cinder-api-0" Dec 13 03:30:56 crc kubenswrapper[5070]: I1213 03:30:56.906207 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6d14911c-95e8-4604-af3a-efcea2eb4b73-config-data-custom\") pod \"cinder-api-0\" (UID: \"6d14911c-95e8-4604-af3a-efcea2eb4b73\") " pod="openstack/cinder-api-0" Dec 13 03:30:56 crc kubenswrapper[5070]: I1213 03:30:56.906363 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6d14911c-95e8-4604-af3a-efcea2eb4b73-config-data\") pod \"cinder-api-0\" (UID: \"6d14911c-95e8-4604-af3a-efcea2eb4b73\") " pod="openstack/cinder-api-0" Dec 13 03:30:56 crc kubenswrapper[5070]: I1213 03:30:56.906462 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6d14911c-95e8-4604-af3a-efcea2eb4b73-scripts\") pod \"cinder-api-0\" (UID: \"6d14911c-95e8-4604-af3a-efcea2eb4b73\") " pod="openstack/cinder-api-0" Dec 13 03:30:56 crc kubenswrapper[5070]: I1213 03:30:56.906575 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/6d14911c-95e8-4604-af3a-efcea2eb4b73-public-tls-certs\") pod \"cinder-api-0\" (UID: \"6d14911c-95e8-4604-af3a-efcea2eb4b73\") " pod="openstack/cinder-api-0" Dec 13 03:30:56 crc kubenswrapper[5070]: I1213 03:30:56.906666 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6d14911c-95e8-4604-af3a-efcea2eb4b73-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"6d14911c-95e8-4604-af3a-efcea2eb4b73\") " pod="openstack/cinder-api-0" Dec 13 03:30:57 crc kubenswrapper[5070]: I1213 03:30:57.011615 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6d14911c-95e8-4604-af3a-efcea2eb4b73-public-tls-certs\") pod \"cinder-api-0\" (UID: \"6d14911c-95e8-4604-af3a-efcea2eb4b73\") " pod="openstack/cinder-api-0" Dec 13 03:30:57 crc kubenswrapper[5070]: I1213 03:30:57.011953 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6d14911c-95e8-4604-af3a-efcea2eb4b73-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"6d14911c-95e8-4604-af3a-efcea2eb4b73\") " pod="openstack/cinder-api-0" Dec 13 03:30:57 crc kubenswrapper[5070]: I1213 03:30:57.012146 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6d14911c-95e8-4604-af3a-efcea2eb4b73-logs\") pod \"cinder-api-0\" (UID: \"6d14911c-95e8-4604-af3a-efcea2eb4b73\") " pod="openstack/cinder-api-0" Dec 13 03:30:57 crc kubenswrapper[5070]: I1213 03:30:57.012253 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/6d14911c-95e8-4604-af3a-efcea2eb4b73-etc-machine-id\") pod \"cinder-api-0\" (UID: \"6d14911c-95e8-4604-af3a-efcea2eb4b73\") " pod="openstack/cinder-api-0" Dec 13 03:30:57 crc kubenswrapper[5070]: I1213 03:30:57.012470 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pg2tw\" (UniqueName: \"kubernetes.io/projected/6d14911c-95e8-4604-af3a-efcea2eb4b73-kube-api-access-pg2tw\") pod \"cinder-api-0\" (UID: \"6d14911c-95e8-4604-af3a-efcea2eb4b73\") " pod="openstack/cinder-api-0" Dec 13 03:30:57 crc kubenswrapper[5070]: I1213 03:30:57.012586 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6d14911c-95e8-4604-af3a-efcea2eb4b73-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"6d14911c-95e8-4604-af3a-efcea2eb4b73\") " pod="openstack/cinder-api-0" Dec 13 03:30:57 crc kubenswrapper[5070]: I1213 03:30:57.012820 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6d14911c-95e8-4604-af3a-efcea2eb4b73-config-data-custom\") pod \"cinder-api-0\" (UID: \"6d14911c-95e8-4604-af3a-efcea2eb4b73\") " pod="openstack/cinder-api-0" Dec 13 03:30:57 crc kubenswrapper[5070]: I1213 03:30:57.012968 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6d14911c-95e8-4604-af3a-efcea2eb4b73-config-data\") pod \"cinder-api-0\" (UID: \"6d14911c-95e8-4604-af3a-efcea2eb4b73\") " pod="openstack/cinder-api-0" Dec 13 03:30:57 crc kubenswrapper[5070]: I1213 03:30:57.013077 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6d14911c-95e8-4604-af3a-efcea2eb4b73-scripts\") pod \"cinder-api-0\" (UID: \"6d14911c-95e8-4604-af3a-efcea2eb4b73\") " pod="openstack/cinder-api-0" Dec 13 03:30:57 crc kubenswrapper[5070]: I1213 03:30:57.015957 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/6d14911c-95e8-4604-af3a-efcea2eb4b73-etc-machine-id\") pod \"cinder-api-0\" (UID: \"6d14911c-95e8-4604-af3a-efcea2eb4b73\") " pod="openstack/cinder-api-0" Dec 13 03:30:57 crc kubenswrapper[5070]: I1213 03:30:57.016400 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6d14911c-95e8-4604-af3a-efcea2eb4b73-logs\") pod \"cinder-api-0\" (UID: \"6d14911c-95e8-4604-af3a-efcea2eb4b73\") " pod="openstack/cinder-api-0" Dec 13 03:30:57 crc kubenswrapper[5070]: I1213 03:30:57.026572 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6d14911c-95e8-4604-af3a-efcea2eb4b73-scripts\") pod \"cinder-api-0\" (UID: \"6d14911c-95e8-4604-af3a-efcea2eb4b73\") " pod="openstack/cinder-api-0" Dec 13 03:30:57 crc kubenswrapper[5070]: I1213 03:30:57.027477 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6d14911c-95e8-4604-af3a-efcea2eb4b73-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"6d14911c-95e8-4604-af3a-efcea2eb4b73\") " pod="openstack/cinder-api-0" Dec 13 03:30:57 crc kubenswrapper[5070]: I1213 03:30:57.033694 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pg2tw\" (UniqueName: \"kubernetes.io/projected/6d14911c-95e8-4604-af3a-efcea2eb4b73-kube-api-access-pg2tw\") pod \"cinder-api-0\" (UID: \"6d14911c-95e8-4604-af3a-efcea2eb4b73\") " pod="openstack/cinder-api-0" Dec 13 03:30:57 crc kubenswrapper[5070]: I1213 03:30:57.039293 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6d14911c-95e8-4604-af3a-efcea2eb4b73-public-tls-certs\") pod \"cinder-api-0\" (UID: \"6d14911c-95e8-4604-af3a-efcea2eb4b73\") " pod="openstack/cinder-api-0" Dec 13 03:30:57 crc kubenswrapper[5070]: I1213 03:30:57.039597 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6d14911c-95e8-4604-af3a-efcea2eb4b73-config-data-custom\") pod \"cinder-api-0\" (UID: \"6d14911c-95e8-4604-af3a-efcea2eb4b73\") " pod="openstack/cinder-api-0" Dec 13 03:30:57 crc kubenswrapper[5070]: I1213 03:30:57.044496 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6d14911c-95e8-4604-af3a-efcea2eb4b73-config-data\") pod \"cinder-api-0\" (UID: \"6d14911c-95e8-4604-af3a-efcea2eb4b73\") " pod="openstack/cinder-api-0" Dec 13 03:30:57 crc kubenswrapper[5070]: I1213 03:30:57.050913 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6d14911c-95e8-4604-af3a-efcea2eb4b73-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"6d14911c-95e8-4604-af3a-efcea2eb4b73\") " pod="openstack/cinder-api-0" Dec 13 03:30:57 crc kubenswrapper[5070]: I1213 03:30:57.189373 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Dec 13 03:30:57 crc kubenswrapper[5070]: I1213 03:30:57.454050 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3892d787-b7e9-4f06-b629-7c30653b7a43","Type":"ContainerStarted","Data":"a1b86746aa66c80cbe9300c22a993beb387abe2462d29ef76aaf244a3bea62e7"} Dec 13 03:30:57 crc kubenswrapper[5070]: I1213 03:30:57.454381 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3892d787-b7e9-4f06-b629-7c30653b7a43","Type":"ContainerStarted","Data":"9258fed628f69862714753559333ea92706ddff95ac28a3662fb68549714437e"} Dec 13 03:30:57 crc kubenswrapper[5070]: I1213 03:30:57.707353 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 13 03:30:58 crc kubenswrapper[5070]: I1213 03:30:58.184809 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="75d3764c-455c-4b0d-98cd-ceaaee77b19e" path="/var/lib/kubelet/pods/75d3764c-455c-4b0d-98cd-ceaaee77b19e/volumes" Dec 13 03:30:58 crc kubenswrapper[5070]: I1213 03:30:58.466927 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"6d14911c-95e8-4604-af3a-efcea2eb4b73","Type":"ContainerStarted","Data":"d9af71c298d3a29be19cafe3b958cab4fce97046d05cfa522529f3379e620044"} Dec 13 03:30:58 crc kubenswrapper[5070]: I1213 03:30:58.467279 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"6d14911c-95e8-4604-af3a-efcea2eb4b73","Type":"ContainerStarted","Data":"1b26ad566df845e24b5b1708eba69336e4b51c58fceb93a8701ad7a6199d90b9"} Dec 13 03:30:58 crc kubenswrapper[5070]: I1213 03:30:58.468420 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3892d787-b7e9-4f06-b629-7c30653b7a43","Type":"ContainerStarted","Data":"0b5e604e952336b9dec754489e27bf80fac37ce22898a7b62247114559f8d155"} Dec 13 03:30:58 crc kubenswrapper[5070]: I1213 03:30:58.555098 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-7d678df9bd-j8bfh" Dec 13 03:30:58 crc kubenswrapper[5070]: I1213 03:30:58.591873 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-7d678df9bd-j8bfh" Dec 13 03:30:58 crc kubenswrapper[5070]: I1213 03:30:58.649911 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-55d578b79b-csd56"] Dec 13 03:30:58 crc kubenswrapper[5070]: I1213 03:30:58.650269 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-55d578b79b-csd56" podUID="0eb4c370-92f7-42ac-8362-5e4bd8b3b504" containerName="barbican-api" containerID="cri-o://bbf94aa09aa833afd86624f0cb7cbffce48432a33c75dd9aa4acf582cc28ddf7" gracePeriod=30 Dec 13 03:30:58 crc kubenswrapper[5070]: I1213 03:30:58.650597 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-55d578b79b-csd56" podUID="0eb4c370-92f7-42ac-8362-5e4bd8b3b504" containerName="barbican-api-log" containerID="cri-o://658955e06043690e449625229edeb7e8d88533763fee88adbecaa31c566e2e8b" gracePeriod=30 Dec 13 03:30:59 crc kubenswrapper[5070]: I1213 03:30:59.486509 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"6d14911c-95e8-4604-af3a-efcea2eb4b73","Type":"ContainerStarted","Data":"3bafc0482893f50b5b8b1407d47b5acfaeef74e50aa18cc03d463f1c298551ed"} Dec 13 03:30:59 crc kubenswrapper[5070]: I1213 
03:30:59.487104 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Dec 13 03:30:59 crc kubenswrapper[5070]: I1213 03:30:59.491934 5070 generic.go:334] "Generic (PLEG): container finished" podID="0eb4c370-92f7-42ac-8362-5e4bd8b3b504" containerID="658955e06043690e449625229edeb7e8d88533763fee88adbecaa31c566e2e8b" exitCode=143 Dec 13 03:30:59 crc kubenswrapper[5070]: I1213 03:30:59.492027 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-55d578b79b-csd56" event={"ID":"0eb4c370-92f7-42ac-8362-5e4bd8b3b504","Type":"ContainerDied","Data":"658955e06043690e449625229edeb7e8d88533763fee88adbecaa31c566e2e8b"} Dec 13 03:30:59 crc kubenswrapper[5070]: I1213 03:30:59.495394 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3892d787-b7e9-4f06-b629-7c30653b7a43","Type":"ContainerStarted","Data":"ad3a62409c7407eaaeb04fcb6664eba16d846416bbab3c6d2e5d85d493f47b31"} Dec 13 03:30:59 crc kubenswrapper[5070]: I1213 03:30:59.504506 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=3.504489094 podStartE2EDuration="3.504489094s" podCreationTimestamp="2025-12-13 03:30:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 03:30:59.502950621 +0000 UTC m=+1151.738794167" watchObservedRunningTime="2025-12-13 03:30:59.504489094 +0000 UTC m=+1151.740332640" Dec 13 03:31:00 crc kubenswrapper[5070]: I1213 03:31:00.151068 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-64f646c4fb-ktrsv" Dec 13 03:31:01 crc kubenswrapper[5070]: I1213 03:31:01.516700 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-e920-account-create-c57q5"] Dec 13 03:31:01 crc kubenswrapper[5070]: I1213 03:31:01.518522 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-e920-account-create-c57q5" Dec 13 03:31:01 crc kubenswrapper[5070]: I1213 03:31:01.519116 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3892d787-b7e9-4f06-b629-7c30653b7a43","Type":"ContainerStarted","Data":"5a1d37d5465eb72652deebdf56f46bcbc07c3d90fbfdeaa9e19976a35200e835"} Dec 13 03:31:01 crc kubenswrapper[5070]: I1213 03:31:01.519844 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 13 03:31:01 crc kubenswrapper[5070]: I1213 03:31:01.520499 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret" Dec 13 03:31:01 crc kubenswrapper[5070]: I1213 03:31:01.546669 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-e920-account-create-c57q5"] Dec 13 03:31:01 crc kubenswrapper[5070]: I1213 03:31:01.604032 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.660872725 podStartE2EDuration="6.604009754s" podCreationTimestamp="2025-12-13 03:30:55 +0000 UTC" firstStartedPulling="2025-12-13 03:30:56.503934961 +0000 UTC m=+1148.739778507" lastFinishedPulling="2025-12-13 03:31:00.44707197 +0000 UTC m=+1152.682915536" observedRunningTime="2025-12-13 03:31:01.561550734 +0000 UTC m=+1153.797394280" watchObservedRunningTime="2025-12-13 03:31:01.604009754 +0000 UTC m=+1153.839853300" Dec 13 03:31:01 crc kubenswrapper[5070]: I1213 03:31:01.628091 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mx7vd\" (UniqueName: \"kubernetes.io/projected/528f5eba-2b8a-478c-a09f-6e97729d2b31-kube-api-access-mx7vd\") pod \"nova-api-e920-account-create-c57q5\" (UID: \"528f5eba-2b8a-478c-a09f-6e97729d2b31\") " pod="openstack/nova-api-e920-account-create-c57q5" Dec 13 03:31:01 crc kubenswrapper[5070]: I1213 03:31:01.714077 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-0b75-account-create-f5fw5"] Dec 13 03:31:01 crc kubenswrapper[5070]: I1213 03:31:01.715396 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-0b75-account-create-f5fw5" Dec 13 03:31:01 crc kubenswrapper[5070]: I1213 03:31:01.717549 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret" Dec 13 03:31:01 crc kubenswrapper[5070]: I1213 03:31:01.729185 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-0b75-account-create-f5fw5"] Dec 13 03:31:01 crc kubenswrapper[5070]: I1213 03:31:01.729800 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mx7vd\" (UniqueName: \"kubernetes.io/projected/528f5eba-2b8a-478c-a09f-6e97729d2b31-kube-api-access-mx7vd\") pod \"nova-api-e920-account-create-c57q5\" (UID: \"528f5eba-2b8a-478c-a09f-6e97729d2b31\") " pod="openstack/nova-api-e920-account-create-c57q5" Dec 13 03:31:01 crc kubenswrapper[5070]: I1213 03:31:01.750308 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mx7vd\" (UniqueName: \"kubernetes.io/projected/528f5eba-2b8a-478c-a09f-6e97729d2b31-kube-api-access-mx7vd\") pod \"nova-api-e920-account-create-c57q5\" (UID: \"528f5eba-2b8a-478c-a09f-6e97729d2b31\") " pod="openstack/nova-api-e920-account-create-c57q5" Dec 13 03:31:01 crc kubenswrapper[5070]: I1213 03:31:01.831086 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l88vx\" (UniqueName: \"kubernetes.io/projected/d30e4476-3bb4-4ac3-b30f-67d2900ab0c1-kube-api-access-l88vx\") pod \"nova-cell0-0b75-account-create-f5fw5\" (UID: \"d30e4476-3bb4-4ac3-b30f-67d2900ab0c1\") " pod="openstack/nova-cell0-0b75-account-create-f5fw5" Dec 13 03:31:01 crc kubenswrapper[5070]: I1213 03:31:01.845646 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-e920-account-create-c57q5" Dec 13 03:31:01 crc kubenswrapper[5070]: I1213 03:31:01.863983 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Dec 13 03:31:01 crc kubenswrapper[5070]: I1213 03:31:01.899420 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-58db5546cc-vk92n" Dec 13 03:31:01 crc kubenswrapper[5070]: I1213 03:31:01.916256 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 13 03:31:01 crc kubenswrapper[5070]: I1213 03:31:01.932869 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l88vx\" (UniqueName: \"kubernetes.io/projected/d30e4476-3bb4-4ac3-b30f-67d2900ab0c1-kube-api-access-l88vx\") pod \"nova-cell0-0b75-account-create-f5fw5\" (UID: \"d30e4476-3bb4-4ac3-b30f-67d2900ab0c1\") " pod="openstack/nova-cell0-0b75-account-create-f5fw5" Dec 13 03:31:01 crc kubenswrapper[5070]: I1213 03:31:01.976318 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l88vx\" (UniqueName: \"kubernetes.io/projected/d30e4476-3bb4-4ac3-b30f-67d2900ab0c1-kube-api-access-l88vx\") pod \"nova-cell0-0b75-account-create-f5fw5\" (UID: \"d30e4476-3bb4-4ac3-b30f-67d2900ab0c1\") " pod="openstack/nova-cell0-0b75-account-create-f5fw5" Dec 13 03:31:01 crc kubenswrapper[5070]: I1213 03:31:01.998595 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5f66db59b9-89998"] Dec 13 03:31:01 crc kubenswrapper[5070]: I1213 03:31:01.998884 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5f66db59b9-89998" podUID="6e76ff7f-507c-4d7f-baf0-b1967eae5fff" containerName="dnsmasq-dns" containerID="cri-o://6085670a963d85e0967bff96dea16f6732e714a003a6b832f2249a3ead6eb11d" gracePeriod=10 Dec 13 03:31:02 crc kubenswrapper[5070]: I1213 03:31:02.033752 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-0b75-account-create-f5fw5" Dec 13 03:31:02 crc kubenswrapper[5070]: I1213 03:31:02.413160 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 13 03:31:02 crc kubenswrapper[5070]: I1213 03:31:02.427568 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-e920-account-create-c57q5"] Dec 13 03:31:02 crc kubenswrapper[5070]: I1213 03:31:02.455600 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-54586d498f-pgsrg" Dec 13 03:31:02 crc kubenswrapper[5070]: I1213 03:31:02.529817 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-64f646c4fb-ktrsv"] Dec 13 03:31:02 crc kubenswrapper[5070]: I1213 03:31:02.530074 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-64f646c4fb-ktrsv" podUID="d7d71324-fddc-4a3a-ab67-164b8d90b2d3" containerName="neutron-api" containerID="cri-o://9b5413f46bce5d9fb208db75baa625809e81164c0f0a7c77e14239e069537396" gracePeriod=30 Dec 13 03:31:02 crc kubenswrapper[5070]: I1213 03:31:02.530557 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-64f646c4fb-ktrsv" podUID="d7d71324-fddc-4a3a-ab67-164b8d90b2d3" containerName="neutron-httpd" containerID="cri-o://a54a2e955030fa01e6711e092c1c18c7b1e5d91787c10c59117de8ab23fd71c1" gracePeriod=30 Dec 13 03:31:02 crc kubenswrapper[5070]: I1213 03:31:02.536383 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-e920-account-create-c57q5" event={"ID":"528f5eba-2b8a-478c-a09f-6e97729d2b31","Type":"ContainerStarted","Data":"8f5370850edf468ef6d64490a98336cc10d917cd176b3517b26315acb1471633"} Dec 13 03:31:02 crc kubenswrapper[5070]: I1213 03:31:02.542626 5070 generic.go:334] "Generic (PLEG): container finished" podID="0eb4c370-92f7-42ac-8362-5e4bd8b3b504" containerID="bbf94aa09aa833afd86624f0cb7cbffce48432a33c75dd9aa4acf582cc28ddf7" exitCode=0 Dec 13 03:31:02 crc kubenswrapper[5070]: I1213 03:31:02.542660 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-55d578b79b-csd56" event={"ID":"0eb4c370-92f7-42ac-8362-5e4bd8b3b504","Type":"ContainerDied","Data":"bbf94aa09aa833afd86624f0cb7cbffce48432a33c75dd9aa4acf582cc28ddf7"} Dec 13 03:31:02 crc kubenswrapper[5070]: I1213 03:31:02.546728 5070 generic.go:334] "Generic (PLEG): container finished" podID="6e76ff7f-507c-4d7f-baf0-b1967eae5fff" containerID="6085670a963d85e0967bff96dea16f6732e714a003a6b832f2249a3ead6eb11d" exitCode=0 Dec 13 03:31:02 crc kubenswrapper[5070]: I1213 03:31:02.546967 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="4b4c78b2-a38d-476f-b88a-089e89d92fef" containerName="cinder-scheduler" containerID="cri-o://fda161a35f795c76afb7c65f4274c50e905d564b116fd577ffd4546f835f8039" gracePeriod=30 Dec 13 03:31:02 crc kubenswrapper[5070]: I1213 03:31:02.547080 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f66db59b9-89998" event={"ID":"6e76ff7f-507c-4d7f-baf0-b1967eae5fff","Type":"ContainerDied","Data":"6085670a963d85e0967bff96dea16f6732e714a003a6b832f2249a3ead6eb11d"} Dec 13 03:31:02 crc kubenswrapper[5070]: I1213 03:31:02.547514 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="4b4c78b2-a38d-476f-b88a-089e89d92fef" containerName="probe" 
containerID="cri-o://d8c0b2df5788ed9d47f087efae84629420a6cf1956558ddb519c6b2f971f72f2" gracePeriod=30 Dec 13 03:31:02 crc kubenswrapper[5070]: I1213 03:31:02.627587 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5f66db59b9-89998" Dec 13 03:31:02 crc kubenswrapper[5070]: I1213 03:31:02.645246 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-55d578b79b-csd56" Dec 13 03:31:02 crc kubenswrapper[5070]: W1213 03:31:02.736311 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd30e4476_3bb4_4ac3_b30f_67d2900ab0c1.slice/crio-68069a2adb9fd9f36f386a380dc56bd2b2b7780d79cc28c141547ca0c748b345 WatchSource:0}: Error finding container 68069a2adb9fd9f36f386a380dc56bd2b2b7780d79cc28c141547ca0c748b345: Status 404 returned error can't find the container with id 68069a2adb9fd9f36f386a380dc56bd2b2b7780d79cc28c141547ca0c748b345 Dec 13 03:31:02 crc kubenswrapper[5070]: I1213 03:31:02.744841 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-0b75-account-create-f5fw5"] Dec 13 03:31:02 crc kubenswrapper[5070]: I1213 03:31:02.760326 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0eb4c370-92f7-42ac-8362-5e4bd8b3b504-combined-ca-bundle\") pod \"0eb4c370-92f7-42ac-8362-5e4bd8b3b504\" (UID: \"0eb4c370-92f7-42ac-8362-5e4bd8b3b504\") " Dec 13 03:31:02 crc kubenswrapper[5070]: I1213 03:31:02.760370 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6e76ff7f-507c-4d7f-baf0-b1967eae5fff-ovsdbserver-nb\") pod \"6e76ff7f-507c-4d7f-baf0-b1967eae5fff\" (UID: \"6e76ff7f-507c-4d7f-baf0-b1967eae5fff\") " Dec 13 03:31:02 crc kubenswrapper[5070]: I1213 03:31:02.760396 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0eb4c370-92f7-42ac-8362-5e4bd8b3b504-logs\") pod \"0eb4c370-92f7-42ac-8362-5e4bd8b3b504\" (UID: \"0eb4c370-92f7-42ac-8362-5e4bd8b3b504\") " Dec 13 03:31:02 crc kubenswrapper[5070]: I1213 03:31:02.760434 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z25b5\" (UniqueName: \"kubernetes.io/projected/0eb4c370-92f7-42ac-8362-5e4bd8b3b504-kube-api-access-z25b5\") pod \"0eb4c370-92f7-42ac-8362-5e4bd8b3b504\" (UID: \"0eb4c370-92f7-42ac-8362-5e4bd8b3b504\") " Dec 13 03:31:02 crc kubenswrapper[5070]: I1213 03:31:02.760509 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cp87m\" (UniqueName: \"kubernetes.io/projected/6e76ff7f-507c-4d7f-baf0-b1967eae5fff-kube-api-access-cp87m\") pod \"6e76ff7f-507c-4d7f-baf0-b1967eae5fff\" (UID: \"6e76ff7f-507c-4d7f-baf0-b1967eae5fff\") " Dec 13 03:31:02 crc kubenswrapper[5070]: I1213 03:31:02.760540 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6e76ff7f-507c-4d7f-baf0-b1967eae5fff-ovsdbserver-sb\") pod \"6e76ff7f-507c-4d7f-baf0-b1967eae5fff\" (UID: \"6e76ff7f-507c-4d7f-baf0-b1967eae5fff\") " Dec 13 03:31:02 crc kubenswrapper[5070]: I1213 03:31:02.760661 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: 
\"kubernetes.io/secret/0eb4c370-92f7-42ac-8362-5e4bd8b3b504-config-data-custom\") pod \"0eb4c370-92f7-42ac-8362-5e4bd8b3b504\" (UID: \"0eb4c370-92f7-42ac-8362-5e4bd8b3b504\") " Dec 13 03:31:02 crc kubenswrapper[5070]: I1213 03:31:02.760769 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6e76ff7f-507c-4d7f-baf0-b1967eae5fff-dns-svc\") pod \"6e76ff7f-507c-4d7f-baf0-b1967eae5fff\" (UID: \"6e76ff7f-507c-4d7f-baf0-b1967eae5fff\") " Dec 13 03:31:02 crc kubenswrapper[5070]: I1213 03:31:02.760857 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6e76ff7f-507c-4d7f-baf0-b1967eae5fff-config\") pod \"6e76ff7f-507c-4d7f-baf0-b1967eae5fff\" (UID: \"6e76ff7f-507c-4d7f-baf0-b1967eae5fff\") " Dec 13 03:31:02 crc kubenswrapper[5070]: I1213 03:31:02.760894 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0eb4c370-92f7-42ac-8362-5e4bd8b3b504-config-data\") pod \"0eb4c370-92f7-42ac-8362-5e4bd8b3b504\" (UID: \"0eb4c370-92f7-42ac-8362-5e4bd8b3b504\") " Dec 13 03:31:02 crc kubenswrapper[5070]: I1213 03:31:02.761184 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0eb4c370-92f7-42ac-8362-5e4bd8b3b504-logs" (OuterVolumeSpecName: "logs") pod "0eb4c370-92f7-42ac-8362-5e4bd8b3b504" (UID: "0eb4c370-92f7-42ac-8362-5e4bd8b3b504"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 03:31:02 crc kubenswrapper[5070]: I1213 03:31:02.761297 5070 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0eb4c370-92f7-42ac-8362-5e4bd8b3b504-logs\") on node \"crc\" DevicePath \"\"" Dec 13 03:31:02 crc kubenswrapper[5070]: I1213 03:31:02.766412 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0eb4c370-92f7-42ac-8362-5e4bd8b3b504-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "0eb4c370-92f7-42ac-8362-5e4bd8b3b504" (UID: "0eb4c370-92f7-42ac-8362-5e4bd8b3b504"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:31:02 crc kubenswrapper[5070]: I1213 03:31:02.767007 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0eb4c370-92f7-42ac-8362-5e4bd8b3b504-kube-api-access-z25b5" (OuterVolumeSpecName: "kube-api-access-z25b5") pod "0eb4c370-92f7-42ac-8362-5e4bd8b3b504" (UID: "0eb4c370-92f7-42ac-8362-5e4bd8b3b504"). InnerVolumeSpecName "kube-api-access-z25b5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:31:02 crc kubenswrapper[5070]: I1213 03:31:02.769866 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6e76ff7f-507c-4d7f-baf0-b1967eae5fff-kube-api-access-cp87m" (OuterVolumeSpecName: "kube-api-access-cp87m") pod "6e76ff7f-507c-4d7f-baf0-b1967eae5fff" (UID: "6e76ff7f-507c-4d7f-baf0-b1967eae5fff"). InnerVolumeSpecName "kube-api-access-cp87m". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:31:02 crc kubenswrapper[5070]: I1213 03:31:02.816335 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0eb4c370-92f7-42ac-8362-5e4bd8b3b504-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0eb4c370-92f7-42ac-8362-5e4bd8b3b504" (UID: "0eb4c370-92f7-42ac-8362-5e4bd8b3b504"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:31:02 crc kubenswrapper[5070]: I1213 03:31:02.839016 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6e76ff7f-507c-4d7f-baf0-b1967eae5fff-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "6e76ff7f-507c-4d7f-baf0-b1967eae5fff" (UID: "6e76ff7f-507c-4d7f-baf0-b1967eae5fff"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:31:02 crc kubenswrapper[5070]: I1213 03:31:02.862815 5070 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0eb4c370-92f7-42ac-8362-5e4bd8b3b504-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 03:31:02 crc kubenswrapper[5070]: I1213 03:31:02.862850 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z25b5\" (UniqueName: \"kubernetes.io/projected/0eb4c370-92f7-42ac-8362-5e4bd8b3b504-kube-api-access-z25b5\") on node \"crc\" DevicePath \"\"" Dec 13 03:31:02 crc kubenswrapper[5070]: I1213 03:31:02.862859 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cp87m\" (UniqueName: \"kubernetes.io/projected/6e76ff7f-507c-4d7f-baf0-b1967eae5fff-kube-api-access-cp87m\") on node \"crc\" DevicePath \"\"" Dec 13 03:31:02 crc kubenswrapper[5070]: I1213 03:31:02.862868 5070 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6e76ff7f-507c-4d7f-baf0-b1967eae5fff-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 13 03:31:02 crc kubenswrapper[5070]: I1213 03:31:02.862876 5070 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0eb4c370-92f7-42ac-8362-5e4bd8b3b504-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 13 03:31:02 crc kubenswrapper[5070]: I1213 03:31:02.874663 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6e76ff7f-507c-4d7f-baf0-b1967eae5fff-config" (OuterVolumeSpecName: "config") pod "6e76ff7f-507c-4d7f-baf0-b1967eae5fff" (UID: "6e76ff7f-507c-4d7f-baf0-b1967eae5fff"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:31:02 crc kubenswrapper[5070]: I1213 03:31:02.876836 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0eb4c370-92f7-42ac-8362-5e4bd8b3b504-config-data" (OuterVolumeSpecName: "config-data") pod "0eb4c370-92f7-42ac-8362-5e4bd8b3b504" (UID: "0eb4c370-92f7-42ac-8362-5e4bd8b3b504"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:31:02 crc kubenswrapper[5070]: I1213 03:31:02.881150 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6e76ff7f-507c-4d7f-baf0-b1967eae5fff-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "6e76ff7f-507c-4d7f-baf0-b1967eae5fff" (UID: "6e76ff7f-507c-4d7f-baf0-b1967eae5fff"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:31:02 crc kubenswrapper[5070]: I1213 03:31:02.886554 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6e76ff7f-507c-4d7f-baf0-b1967eae5fff-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "6e76ff7f-507c-4d7f-baf0-b1967eae5fff" (UID: "6e76ff7f-507c-4d7f-baf0-b1967eae5fff"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:31:02 crc kubenswrapper[5070]: I1213 03:31:02.964956 5070 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6e76ff7f-507c-4d7f-baf0-b1967eae5fff-config\") on node \"crc\" DevicePath \"\"" Dec 13 03:31:02 crc kubenswrapper[5070]: I1213 03:31:02.964993 5070 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0eb4c370-92f7-42ac-8362-5e4bd8b3b504-config-data\") on node \"crc\" DevicePath \"\"" Dec 13 03:31:02 crc kubenswrapper[5070]: I1213 03:31:02.965002 5070 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6e76ff7f-507c-4d7f-baf0-b1967eae5fff-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 13 03:31:02 crc kubenswrapper[5070]: I1213 03:31:02.965012 5070 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6e76ff7f-507c-4d7f-baf0-b1967eae5fff-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 13 03:31:03 crc kubenswrapper[5070]: I1213 03:31:03.556213 5070 generic.go:334] "Generic (PLEG): container finished" podID="4b4c78b2-a38d-476f-b88a-089e89d92fef" containerID="d8c0b2df5788ed9d47f087efae84629420a6cf1956558ddb519c6b2f971f72f2" exitCode=0 Dec 13 03:31:03 crc kubenswrapper[5070]: I1213 03:31:03.556322 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"4b4c78b2-a38d-476f-b88a-089e89d92fef","Type":"ContainerDied","Data":"d8c0b2df5788ed9d47f087efae84629420a6cf1956558ddb519c6b2f971f72f2"} Dec 13 03:31:03 crc kubenswrapper[5070]: I1213 03:31:03.559653 5070 generic.go:334] "Generic (PLEG): container finished" podID="d30e4476-3bb4-4ac3-b30f-67d2900ab0c1" containerID="fab62b0e7c57d7e1c178aa10adeed51a72cfe52cdbbc824e57321a3b7b693f33" exitCode=0 Dec 13 03:31:03 crc kubenswrapper[5070]: I1213 03:31:03.559761 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-0b75-account-create-f5fw5" event={"ID":"d30e4476-3bb4-4ac3-b30f-67d2900ab0c1","Type":"ContainerDied","Data":"fab62b0e7c57d7e1c178aa10adeed51a72cfe52cdbbc824e57321a3b7b693f33"} Dec 13 03:31:03 crc kubenswrapper[5070]: I1213 03:31:03.559834 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-0b75-account-create-f5fw5" event={"ID":"d30e4476-3bb4-4ac3-b30f-67d2900ab0c1","Type":"ContainerStarted","Data":"68069a2adb9fd9f36f386a380dc56bd2b2b7780d79cc28c141547ca0c748b345"} Dec 13 03:31:03 crc kubenswrapper[5070]: I1213 03:31:03.561998 5070 generic.go:334] "Generic (PLEG): container finished" podID="d7d71324-fddc-4a3a-ab67-164b8d90b2d3" containerID="a54a2e955030fa01e6711e092c1c18c7b1e5d91787c10c59117de8ab23fd71c1" exitCode=0 Dec 13 03:31:03 crc kubenswrapper[5070]: I1213 03:31:03.562047 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-64f646c4fb-ktrsv" event={"ID":"d7d71324-fddc-4a3a-ab67-164b8d90b2d3","Type":"ContainerDied","Data":"a54a2e955030fa01e6711e092c1c18c7b1e5d91787c10c59117de8ab23fd71c1"} 
Dec 13 03:31:03 crc kubenswrapper[5070]: I1213 03:31:03.563457 5070 generic.go:334] "Generic (PLEG): container finished" podID="528f5eba-2b8a-478c-a09f-6e97729d2b31" containerID="63906721537da9234e81e89109061e80a164247eefe2b755473c3d94f88ea6a2" exitCode=0 Dec 13 03:31:03 crc kubenswrapper[5070]: I1213 03:31:03.563519 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-e920-account-create-c57q5" event={"ID":"528f5eba-2b8a-478c-a09f-6e97729d2b31","Type":"ContainerDied","Data":"63906721537da9234e81e89109061e80a164247eefe2b755473c3d94f88ea6a2"} Dec 13 03:31:03 crc kubenswrapper[5070]: I1213 03:31:03.565124 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-55d578b79b-csd56" event={"ID":"0eb4c370-92f7-42ac-8362-5e4bd8b3b504","Type":"ContainerDied","Data":"27a12762893af057f1a698a7139f331de0b82f58625e4823e241833073fdeb48"} Dec 13 03:31:03 crc kubenswrapper[5070]: I1213 03:31:03.565151 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-55d578b79b-csd56" Dec 13 03:31:03 crc kubenswrapper[5070]: I1213 03:31:03.565272 5070 scope.go:117] "RemoveContainer" containerID="bbf94aa09aa833afd86624f0cb7cbffce48432a33c75dd9aa4acf582cc28ddf7" Dec 13 03:31:03 crc kubenswrapper[5070]: I1213 03:31:03.567542 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="3892d787-b7e9-4f06-b629-7c30653b7a43" containerName="ceilometer-central-agent" containerID="cri-o://a1b86746aa66c80cbe9300c22a993beb387abe2462d29ef76aaf244a3bea62e7" gracePeriod=30 Dec 13 03:31:03 crc kubenswrapper[5070]: I1213 03:31:03.567657 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5f66db59b9-89998" Dec 13 03:31:03 crc kubenswrapper[5070]: I1213 03:31:03.568273 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f66db59b9-89998" event={"ID":"6e76ff7f-507c-4d7f-baf0-b1967eae5fff","Type":"ContainerDied","Data":"afdfdb49e2d63132f8248afd5540b5360242547ce02cffdceec60b24616d14ac"} Dec 13 03:31:03 crc kubenswrapper[5070]: I1213 03:31:03.568342 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="3892d787-b7e9-4f06-b629-7c30653b7a43" containerName="sg-core" containerID="cri-o://ad3a62409c7407eaaeb04fcb6664eba16d846416bbab3c6d2e5d85d493f47b31" gracePeriod=30 Dec 13 03:31:03 crc kubenswrapper[5070]: I1213 03:31:03.568393 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="3892d787-b7e9-4f06-b629-7c30653b7a43" containerName="proxy-httpd" containerID="cri-o://5a1d37d5465eb72652deebdf56f46bcbc07c3d90fbfdeaa9e19976a35200e835" gracePeriod=30 Dec 13 03:31:03 crc kubenswrapper[5070]: I1213 03:31:03.568450 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="3892d787-b7e9-4f06-b629-7c30653b7a43" containerName="ceilometer-notification-agent" containerID="cri-o://0b5e604e952336b9dec754489e27bf80fac37ce22898a7b62247114559f8d155" gracePeriod=30 Dec 13 03:31:03 crc kubenswrapper[5070]: I1213 03:31:03.600612 5070 scope.go:117] "RemoveContainer" containerID="658955e06043690e449625229edeb7e8d88533763fee88adbecaa31c566e2e8b" Dec 13 03:31:03 crc kubenswrapper[5070]: I1213 03:31:03.635054 5070 scope.go:117] "RemoveContainer" containerID="6085670a963d85e0967bff96dea16f6732e714a003a6b832f2249a3ead6eb11d" Dec 13 03:31:03 crc 
kubenswrapper[5070]: I1213 03:31:03.668709 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-55d578b79b-csd56"] Dec 13 03:31:03 crc kubenswrapper[5070]: I1213 03:31:03.669400 5070 scope.go:117] "RemoveContainer" containerID="ea49936432c1ff6650754b48500ee832625315fe5d6f606827d5dbf62ff16a94" Dec 13 03:31:03 crc kubenswrapper[5070]: I1213 03:31:03.703601 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-55d578b79b-csd56"] Dec 13 03:31:03 crc kubenswrapper[5070]: I1213 03:31:03.717892 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5f66db59b9-89998"] Dec 13 03:31:03 crc kubenswrapper[5070]: I1213 03:31:03.727240 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5f66db59b9-89998"] Dec 13 03:31:04 crc kubenswrapper[5070]: I1213 03:31:04.179882 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0eb4c370-92f7-42ac-8362-5e4bd8b3b504" path="/var/lib/kubelet/pods/0eb4c370-92f7-42ac-8362-5e4bd8b3b504/volumes" Dec 13 03:31:04 crc kubenswrapper[5070]: I1213 03:31:04.180781 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6e76ff7f-507c-4d7f-baf0-b1967eae5fff" path="/var/lib/kubelet/pods/6e76ff7f-507c-4d7f-baf0-b1967eae5fff/volumes" Dec 13 03:31:04 crc kubenswrapper[5070]: I1213 03:31:04.318373 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 13 03:31:04 crc kubenswrapper[5070]: I1213 03:31:04.397916 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3892d787-b7e9-4f06-b629-7c30653b7a43-combined-ca-bundle\") pod \"3892d787-b7e9-4f06-b629-7c30653b7a43\" (UID: \"3892d787-b7e9-4f06-b629-7c30653b7a43\") " Dec 13 03:31:04 crc kubenswrapper[5070]: I1213 03:31:04.397974 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3892d787-b7e9-4f06-b629-7c30653b7a43-scripts\") pod \"3892d787-b7e9-4f06-b629-7c30653b7a43\" (UID: \"3892d787-b7e9-4f06-b629-7c30653b7a43\") " Dec 13 03:31:04 crc kubenswrapper[5070]: I1213 03:31:04.397999 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vhn6z\" (UniqueName: \"kubernetes.io/projected/3892d787-b7e9-4f06-b629-7c30653b7a43-kube-api-access-vhn6z\") pod \"3892d787-b7e9-4f06-b629-7c30653b7a43\" (UID: \"3892d787-b7e9-4f06-b629-7c30653b7a43\") " Dec 13 03:31:04 crc kubenswrapper[5070]: I1213 03:31:04.398083 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3892d787-b7e9-4f06-b629-7c30653b7a43-log-httpd\") pod \"3892d787-b7e9-4f06-b629-7c30653b7a43\" (UID: \"3892d787-b7e9-4f06-b629-7c30653b7a43\") " Dec 13 03:31:04 crc kubenswrapper[5070]: I1213 03:31:04.398141 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3892d787-b7e9-4f06-b629-7c30653b7a43-config-data\") pod \"3892d787-b7e9-4f06-b629-7c30653b7a43\" (UID: \"3892d787-b7e9-4f06-b629-7c30653b7a43\") " Dec 13 03:31:04 crc kubenswrapper[5070]: I1213 03:31:04.398165 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3892d787-b7e9-4f06-b629-7c30653b7a43-sg-core-conf-yaml\") pod \"3892d787-b7e9-4f06-b629-7c30653b7a43\" 
(UID: \"3892d787-b7e9-4f06-b629-7c30653b7a43\") " Dec 13 03:31:04 crc kubenswrapper[5070]: I1213 03:31:04.398197 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3892d787-b7e9-4f06-b629-7c30653b7a43-run-httpd\") pod \"3892d787-b7e9-4f06-b629-7c30653b7a43\" (UID: \"3892d787-b7e9-4f06-b629-7c30653b7a43\") " Dec 13 03:31:04 crc kubenswrapper[5070]: I1213 03:31:04.398927 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3892d787-b7e9-4f06-b629-7c30653b7a43-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "3892d787-b7e9-4f06-b629-7c30653b7a43" (UID: "3892d787-b7e9-4f06-b629-7c30653b7a43"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 03:31:04 crc kubenswrapper[5070]: I1213 03:31:04.399045 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3892d787-b7e9-4f06-b629-7c30653b7a43-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "3892d787-b7e9-4f06-b629-7c30653b7a43" (UID: "3892d787-b7e9-4f06-b629-7c30653b7a43"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 03:31:04 crc kubenswrapper[5070]: I1213 03:31:04.403201 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3892d787-b7e9-4f06-b629-7c30653b7a43-scripts" (OuterVolumeSpecName: "scripts") pod "3892d787-b7e9-4f06-b629-7c30653b7a43" (UID: "3892d787-b7e9-4f06-b629-7c30653b7a43"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:31:04 crc kubenswrapper[5070]: I1213 03:31:04.403435 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3892d787-b7e9-4f06-b629-7c30653b7a43-kube-api-access-vhn6z" (OuterVolumeSpecName: "kube-api-access-vhn6z") pod "3892d787-b7e9-4f06-b629-7c30653b7a43" (UID: "3892d787-b7e9-4f06-b629-7c30653b7a43"). InnerVolumeSpecName "kube-api-access-vhn6z". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:31:04 crc kubenswrapper[5070]: I1213 03:31:04.420728 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3892d787-b7e9-4f06-b629-7c30653b7a43-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "3892d787-b7e9-4f06-b629-7c30653b7a43" (UID: "3892d787-b7e9-4f06-b629-7c30653b7a43"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:31:04 crc kubenswrapper[5070]: I1213 03:31:04.463269 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3892d787-b7e9-4f06-b629-7c30653b7a43-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3892d787-b7e9-4f06-b629-7c30653b7a43" (UID: "3892d787-b7e9-4f06-b629-7c30653b7a43"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:31:04 crc kubenswrapper[5070]: I1213 03:31:04.500104 5070 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3892d787-b7e9-4f06-b629-7c30653b7a43-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 03:31:04 crc kubenswrapper[5070]: I1213 03:31:04.500135 5070 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3892d787-b7e9-4f06-b629-7c30653b7a43-scripts\") on node \"crc\" DevicePath \"\"" Dec 13 03:31:04 crc kubenswrapper[5070]: I1213 03:31:04.500145 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vhn6z\" (UniqueName: \"kubernetes.io/projected/3892d787-b7e9-4f06-b629-7c30653b7a43-kube-api-access-vhn6z\") on node \"crc\" DevicePath \"\"" Dec 13 03:31:04 crc kubenswrapper[5070]: I1213 03:31:04.500159 5070 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3892d787-b7e9-4f06-b629-7c30653b7a43-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 13 03:31:04 crc kubenswrapper[5070]: I1213 03:31:04.500172 5070 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3892d787-b7e9-4f06-b629-7c30653b7a43-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 13 03:31:04 crc kubenswrapper[5070]: I1213 03:31:04.500184 5070 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3892d787-b7e9-4f06-b629-7c30653b7a43-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 13 03:31:04 crc kubenswrapper[5070]: I1213 03:31:04.500260 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3892d787-b7e9-4f06-b629-7c30653b7a43-config-data" (OuterVolumeSpecName: "config-data") pod "3892d787-b7e9-4f06-b629-7c30653b7a43" (UID: "3892d787-b7e9-4f06-b629-7c30653b7a43"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:31:04 crc kubenswrapper[5070]: I1213 03:31:04.578549 5070 generic.go:334] "Generic (PLEG): container finished" podID="3892d787-b7e9-4f06-b629-7c30653b7a43" containerID="5a1d37d5465eb72652deebdf56f46bcbc07c3d90fbfdeaa9e19976a35200e835" exitCode=0 Dec 13 03:31:04 crc kubenswrapper[5070]: I1213 03:31:04.578891 5070 generic.go:334] "Generic (PLEG): container finished" podID="3892d787-b7e9-4f06-b629-7c30653b7a43" containerID="ad3a62409c7407eaaeb04fcb6664eba16d846416bbab3c6d2e5d85d493f47b31" exitCode=2 Dec 13 03:31:04 crc kubenswrapper[5070]: I1213 03:31:04.578902 5070 generic.go:334] "Generic (PLEG): container finished" podID="3892d787-b7e9-4f06-b629-7c30653b7a43" containerID="0b5e604e952336b9dec754489e27bf80fac37ce22898a7b62247114559f8d155" exitCode=0 Dec 13 03:31:04 crc kubenswrapper[5070]: I1213 03:31:04.578627 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 13 03:31:04 crc kubenswrapper[5070]: I1213 03:31:04.578911 5070 generic.go:334] "Generic (PLEG): container finished" podID="3892d787-b7e9-4f06-b629-7c30653b7a43" containerID="a1b86746aa66c80cbe9300c22a993beb387abe2462d29ef76aaf244a3bea62e7" exitCode=0 Dec 13 03:31:04 crc kubenswrapper[5070]: I1213 03:31:04.578641 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3892d787-b7e9-4f06-b629-7c30653b7a43","Type":"ContainerDied","Data":"5a1d37d5465eb72652deebdf56f46bcbc07c3d90fbfdeaa9e19976a35200e835"} Dec 13 03:31:04 crc kubenswrapper[5070]: I1213 03:31:04.579124 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3892d787-b7e9-4f06-b629-7c30653b7a43","Type":"ContainerDied","Data":"ad3a62409c7407eaaeb04fcb6664eba16d846416bbab3c6d2e5d85d493f47b31"} Dec 13 03:31:04 crc kubenswrapper[5070]: I1213 03:31:04.579150 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3892d787-b7e9-4f06-b629-7c30653b7a43","Type":"ContainerDied","Data":"0b5e604e952336b9dec754489e27bf80fac37ce22898a7b62247114559f8d155"} Dec 13 03:31:04 crc kubenswrapper[5070]: I1213 03:31:04.579165 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3892d787-b7e9-4f06-b629-7c30653b7a43","Type":"ContainerDied","Data":"a1b86746aa66c80cbe9300c22a993beb387abe2462d29ef76aaf244a3bea62e7"} Dec 13 03:31:04 crc kubenswrapper[5070]: I1213 03:31:04.579177 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3892d787-b7e9-4f06-b629-7c30653b7a43","Type":"ContainerDied","Data":"9258fed628f69862714753559333ea92706ddff95ac28a3662fb68549714437e"} Dec 13 03:31:04 crc kubenswrapper[5070]: I1213 03:31:04.579196 5070 scope.go:117] "RemoveContainer" containerID="5a1d37d5465eb72652deebdf56f46bcbc07c3d90fbfdeaa9e19976a35200e835" Dec 13 03:31:04 crc kubenswrapper[5070]: I1213 03:31:04.602714 5070 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3892d787-b7e9-4f06-b629-7c30653b7a43-config-data\") on node \"crc\" DevicePath \"\"" Dec 13 03:31:04 crc kubenswrapper[5070]: I1213 03:31:04.646921 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 13 03:31:04 crc kubenswrapper[5070]: I1213 03:31:04.657662 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 13 03:31:04 crc kubenswrapper[5070]: I1213 03:31:04.673631 5070 scope.go:117] "RemoveContainer" containerID="ad3a62409c7407eaaeb04fcb6664eba16d846416bbab3c6d2e5d85d493f47b31" Dec 13 03:31:04 crc kubenswrapper[5070]: I1213 03:31:04.681880 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 13 03:31:04 crc kubenswrapper[5070]: E1213 03:31:04.682346 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3892d787-b7e9-4f06-b629-7c30653b7a43" containerName="ceilometer-central-agent" Dec 13 03:31:04 crc kubenswrapper[5070]: I1213 03:31:04.682371 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="3892d787-b7e9-4f06-b629-7c30653b7a43" containerName="ceilometer-central-agent" Dec 13 03:31:04 crc kubenswrapper[5070]: E1213 03:31:04.682384 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6e76ff7f-507c-4d7f-baf0-b1967eae5fff" containerName="init" Dec 13 03:31:04 crc kubenswrapper[5070]: I1213 03:31:04.682393 5070 state_mem.go:107] "Deleted 
CPUSet assignment" podUID="6e76ff7f-507c-4d7f-baf0-b1967eae5fff" containerName="init" Dec 13 03:31:04 crc kubenswrapper[5070]: E1213 03:31:04.682412 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3892d787-b7e9-4f06-b629-7c30653b7a43" containerName="sg-core" Dec 13 03:31:04 crc kubenswrapper[5070]: I1213 03:31:04.682419 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="3892d787-b7e9-4f06-b629-7c30653b7a43" containerName="sg-core" Dec 13 03:31:04 crc kubenswrapper[5070]: E1213 03:31:04.682432 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0eb4c370-92f7-42ac-8362-5e4bd8b3b504" containerName="barbican-api-log" Dec 13 03:31:04 crc kubenswrapper[5070]: I1213 03:31:04.682459 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="0eb4c370-92f7-42ac-8362-5e4bd8b3b504" containerName="barbican-api-log" Dec 13 03:31:04 crc kubenswrapper[5070]: E1213 03:31:04.682485 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0eb4c370-92f7-42ac-8362-5e4bd8b3b504" containerName="barbican-api" Dec 13 03:31:04 crc kubenswrapper[5070]: I1213 03:31:04.682495 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="0eb4c370-92f7-42ac-8362-5e4bd8b3b504" containerName="barbican-api" Dec 13 03:31:04 crc kubenswrapper[5070]: E1213 03:31:04.682513 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6e76ff7f-507c-4d7f-baf0-b1967eae5fff" containerName="dnsmasq-dns" Dec 13 03:31:04 crc kubenswrapper[5070]: I1213 03:31:04.682520 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="6e76ff7f-507c-4d7f-baf0-b1967eae5fff" containerName="dnsmasq-dns" Dec 13 03:31:04 crc kubenswrapper[5070]: E1213 03:31:04.682529 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3892d787-b7e9-4f06-b629-7c30653b7a43" containerName="proxy-httpd" Dec 13 03:31:04 crc kubenswrapper[5070]: I1213 03:31:04.682537 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="3892d787-b7e9-4f06-b629-7c30653b7a43" containerName="proxy-httpd" Dec 13 03:31:04 crc kubenswrapper[5070]: E1213 03:31:04.682559 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3892d787-b7e9-4f06-b629-7c30653b7a43" containerName="ceilometer-notification-agent" Dec 13 03:31:04 crc kubenswrapper[5070]: I1213 03:31:04.682566 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="3892d787-b7e9-4f06-b629-7c30653b7a43" containerName="ceilometer-notification-agent" Dec 13 03:31:04 crc kubenswrapper[5070]: I1213 03:31:04.682762 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="0eb4c370-92f7-42ac-8362-5e4bd8b3b504" containerName="barbican-api-log" Dec 13 03:31:04 crc kubenswrapper[5070]: I1213 03:31:04.682777 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="3892d787-b7e9-4f06-b629-7c30653b7a43" containerName="ceilometer-notification-agent" Dec 13 03:31:04 crc kubenswrapper[5070]: I1213 03:31:04.682791 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="0eb4c370-92f7-42ac-8362-5e4bd8b3b504" containerName="barbican-api" Dec 13 03:31:04 crc kubenswrapper[5070]: I1213 03:31:04.682799 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="3892d787-b7e9-4f06-b629-7c30653b7a43" containerName="proxy-httpd" Dec 13 03:31:04 crc kubenswrapper[5070]: I1213 03:31:04.682815 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="3892d787-b7e9-4f06-b629-7c30653b7a43" containerName="sg-core" Dec 13 03:31:04 crc kubenswrapper[5070]: I1213 03:31:04.682826 5070 
memory_manager.go:354] "RemoveStaleState removing state" podUID="6e76ff7f-507c-4d7f-baf0-b1967eae5fff" containerName="dnsmasq-dns" Dec 13 03:31:04 crc kubenswrapper[5070]: I1213 03:31:04.682834 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="3892d787-b7e9-4f06-b629-7c30653b7a43" containerName="ceilometer-central-agent" Dec 13 03:31:04 crc kubenswrapper[5070]: I1213 03:31:04.685160 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 13 03:31:04 crc kubenswrapper[5070]: I1213 03:31:04.687985 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 13 03:31:04 crc kubenswrapper[5070]: I1213 03:31:04.688134 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 13 03:31:04 crc kubenswrapper[5070]: I1213 03:31:04.704568 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 13 03:31:04 crc kubenswrapper[5070]: I1213 03:31:04.720929 5070 scope.go:117] "RemoveContainer" containerID="0b5e604e952336b9dec754489e27bf80fac37ce22898a7b62247114559f8d155" Dec 13 03:31:04 crc kubenswrapper[5070]: I1213 03:31:04.743609 5070 scope.go:117] "RemoveContainer" containerID="a1b86746aa66c80cbe9300c22a993beb387abe2462d29ef76aaf244a3bea62e7" Dec 13 03:31:04 crc kubenswrapper[5070]: I1213 03:31:04.774033 5070 scope.go:117] "RemoveContainer" containerID="5a1d37d5465eb72652deebdf56f46bcbc07c3d90fbfdeaa9e19976a35200e835" Dec 13 03:31:04 crc kubenswrapper[5070]: E1213 03:31:04.774782 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5a1d37d5465eb72652deebdf56f46bcbc07c3d90fbfdeaa9e19976a35200e835\": container with ID starting with 5a1d37d5465eb72652deebdf56f46bcbc07c3d90fbfdeaa9e19976a35200e835 not found: ID does not exist" containerID="5a1d37d5465eb72652deebdf56f46bcbc07c3d90fbfdeaa9e19976a35200e835" Dec 13 03:31:04 crc kubenswrapper[5070]: I1213 03:31:04.774829 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5a1d37d5465eb72652deebdf56f46bcbc07c3d90fbfdeaa9e19976a35200e835"} err="failed to get container status \"5a1d37d5465eb72652deebdf56f46bcbc07c3d90fbfdeaa9e19976a35200e835\": rpc error: code = NotFound desc = could not find container \"5a1d37d5465eb72652deebdf56f46bcbc07c3d90fbfdeaa9e19976a35200e835\": container with ID starting with 5a1d37d5465eb72652deebdf56f46bcbc07c3d90fbfdeaa9e19976a35200e835 not found: ID does not exist" Dec 13 03:31:04 crc kubenswrapper[5070]: I1213 03:31:04.774859 5070 scope.go:117] "RemoveContainer" containerID="ad3a62409c7407eaaeb04fcb6664eba16d846416bbab3c6d2e5d85d493f47b31" Dec 13 03:31:04 crc kubenswrapper[5070]: E1213 03:31:04.776737 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ad3a62409c7407eaaeb04fcb6664eba16d846416bbab3c6d2e5d85d493f47b31\": container with ID starting with ad3a62409c7407eaaeb04fcb6664eba16d846416bbab3c6d2e5d85d493f47b31 not found: ID does not exist" containerID="ad3a62409c7407eaaeb04fcb6664eba16d846416bbab3c6d2e5d85d493f47b31" Dec 13 03:31:04 crc kubenswrapper[5070]: I1213 03:31:04.776776 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ad3a62409c7407eaaeb04fcb6664eba16d846416bbab3c6d2e5d85d493f47b31"} err="failed to get container status 
\"ad3a62409c7407eaaeb04fcb6664eba16d846416bbab3c6d2e5d85d493f47b31\": rpc error: code = NotFound desc = could not find container \"ad3a62409c7407eaaeb04fcb6664eba16d846416bbab3c6d2e5d85d493f47b31\": container with ID starting with ad3a62409c7407eaaeb04fcb6664eba16d846416bbab3c6d2e5d85d493f47b31 not found: ID does not exist" Dec 13 03:31:04 crc kubenswrapper[5070]: I1213 03:31:04.776809 5070 scope.go:117] "RemoveContainer" containerID="0b5e604e952336b9dec754489e27bf80fac37ce22898a7b62247114559f8d155" Dec 13 03:31:04 crc kubenswrapper[5070]: E1213 03:31:04.778732 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0b5e604e952336b9dec754489e27bf80fac37ce22898a7b62247114559f8d155\": container with ID starting with 0b5e604e952336b9dec754489e27bf80fac37ce22898a7b62247114559f8d155 not found: ID does not exist" containerID="0b5e604e952336b9dec754489e27bf80fac37ce22898a7b62247114559f8d155" Dec 13 03:31:04 crc kubenswrapper[5070]: I1213 03:31:04.778769 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0b5e604e952336b9dec754489e27bf80fac37ce22898a7b62247114559f8d155"} err="failed to get container status \"0b5e604e952336b9dec754489e27bf80fac37ce22898a7b62247114559f8d155\": rpc error: code = NotFound desc = could not find container \"0b5e604e952336b9dec754489e27bf80fac37ce22898a7b62247114559f8d155\": container with ID starting with 0b5e604e952336b9dec754489e27bf80fac37ce22898a7b62247114559f8d155 not found: ID does not exist" Dec 13 03:31:04 crc kubenswrapper[5070]: I1213 03:31:04.778794 5070 scope.go:117] "RemoveContainer" containerID="a1b86746aa66c80cbe9300c22a993beb387abe2462d29ef76aaf244a3bea62e7" Dec 13 03:31:04 crc kubenswrapper[5070]: E1213 03:31:04.779225 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a1b86746aa66c80cbe9300c22a993beb387abe2462d29ef76aaf244a3bea62e7\": container with ID starting with a1b86746aa66c80cbe9300c22a993beb387abe2462d29ef76aaf244a3bea62e7 not found: ID does not exist" containerID="a1b86746aa66c80cbe9300c22a993beb387abe2462d29ef76aaf244a3bea62e7" Dec 13 03:31:04 crc kubenswrapper[5070]: I1213 03:31:04.779247 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a1b86746aa66c80cbe9300c22a993beb387abe2462d29ef76aaf244a3bea62e7"} err="failed to get container status \"a1b86746aa66c80cbe9300c22a993beb387abe2462d29ef76aaf244a3bea62e7\": rpc error: code = NotFound desc = could not find container \"a1b86746aa66c80cbe9300c22a993beb387abe2462d29ef76aaf244a3bea62e7\": container with ID starting with a1b86746aa66c80cbe9300c22a993beb387abe2462d29ef76aaf244a3bea62e7 not found: ID does not exist" Dec 13 03:31:04 crc kubenswrapper[5070]: I1213 03:31:04.779262 5070 scope.go:117] "RemoveContainer" containerID="5a1d37d5465eb72652deebdf56f46bcbc07c3d90fbfdeaa9e19976a35200e835" Dec 13 03:31:04 crc kubenswrapper[5070]: I1213 03:31:04.781842 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5a1d37d5465eb72652deebdf56f46bcbc07c3d90fbfdeaa9e19976a35200e835"} err="failed to get container status \"5a1d37d5465eb72652deebdf56f46bcbc07c3d90fbfdeaa9e19976a35200e835\": rpc error: code = NotFound desc = could not find container \"5a1d37d5465eb72652deebdf56f46bcbc07c3d90fbfdeaa9e19976a35200e835\": container with ID starting with 5a1d37d5465eb72652deebdf56f46bcbc07c3d90fbfdeaa9e19976a35200e835 not found: 
ID does not exist" Dec 13 03:31:04 crc kubenswrapper[5070]: I1213 03:31:04.781858 5070 scope.go:117] "RemoveContainer" containerID="ad3a62409c7407eaaeb04fcb6664eba16d846416bbab3c6d2e5d85d493f47b31" Dec 13 03:31:04 crc kubenswrapper[5070]: I1213 03:31:04.782081 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ad3a62409c7407eaaeb04fcb6664eba16d846416bbab3c6d2e5d85d493f47b31"} err="failed to get container status \"ad3a62409c7407eaaeb04fcb6664eba16d846416bbab3c6d2e5d85d493f47b31\": rpc error: code = NotFound desc = could not find container \"ad3a62409c7407eaaeb04fcb6664eba16d846416bbab3c6d2e5d85d493f47b31\": container with ID starting with ad3a62409c7407eaaeb04fcb6664eba16d846416bbab3c6d2e5d85d493f47b31 not found: ID does not exist" Dec 13 03:31:04 crc kubenswrapper[5070]: I1213 03:31:04.782095 5070 scope.go:117] "RemoveContainer" containerID="0b5e604e952336b9dec754489e27bf80fac37ce22898a7b62247114559f8d155" Dec 13 03:31:04 crc kubenswrapper[5070]: I1213 03:31:04.782318 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0b5e604e952336b9dec754489e27bf80fac37ce22898a7b62247114559f8d155"} err="failed to get container status \"0b5e604e952336b9dec754489e27bf80fac37ce22898a7b62247114559f8d155\": rpc error: code = NotFound desc = could not find container \"0b5e604e952336b9dec754489e27bf80fac37ce22898a7b62247114559f8d155\": container with ID starting with 0b5e604e952336b9dec754489e27bf80fac37ce22898a7b62247114559f8d155 not found: ID does not exist" Dec 13 03:31:04 crc kubenswrapper[5070]: I1213 03:31:04.782336 5070 scope.go:117] "RemoveContainer" containerID="a1b86746aa66c80cbe9300c22a993beb387abe2462d29ef76aaf244a3bea62e7" Dec 13 03:31:04 crc kubenswrapper[5070]: I1213 03:31:04.782565 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a1b86746aa66c80cbe9300c22a993beb387abe2462d29ef76aaf244a3bea62e7"} err="failed to get container status \"a1b86746aa66c80cbe9300c22a993beb387abe2462d29ef76aaf244a3bea62e7\": rpc error: code = NotFound desc = could not find container \"a1b86746aa66c80cbe9300c22a993beb387abe2462d29ef76aaf244a3bea62e7\": container with ID starting with a1b86746aa66c80cbe9300c22a993beb387abe2462d29ef76aaf244a3bea62e7 not found: ID does not exist" Dec 13 03:31:04 crc kubenswrapper[5070]: I1213 03:31:04.782578 5070 scope.go:117] "RemoveContainer" containerID="5a1d37d5465eb72652deebdf56f46bcbc07c3d90fbfdeaa9e19976a35200e835" Dec 13 03:31:04 crc kubenswrapper[5070]: I1213 03:31:04.782764 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5a1d37d5465eb72652deebdf56f46bcbc07c3d90fbfdeaa9e19976a35200e835"} err="failed to get container status \"5a1d37d5465eb72652deebdf56f46bcbc07c3d90fbfdeaa9e19976a35200e835\": rpc error: code = NotFound desc = could not find container \"5a1d37d5465eb72652deebdf56f46bcbc07c3d90fbfdeaa9e19976a35200e835\": container with ID starting with 5a1d37d5465eb72652deebdf56f46bcbc07c3d90fbfdeaa9e19976a35200e835 not found: ID does not exist" Dec 13 03:31:04 crc kubenswrapper[5070]: I1213 03:31:04.782778 5070 scope.go:117] "RemoveContainer" containerID="ad3a62409c7407eaaeb04fcb6664eba16d846416bbab3c6d2e5d85d493f47b31" Dec 13 03:31:04 crc kubenswrapper[5070]: I1213 03:31:04.782932 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ad3a62409c7407eaaeb04fcb6664eba16d846416bbab3c6d2e5d85d493f47b31"} err="failed to get container status 
\"ad3a62409c7407eaaeb04fcb6664eba16d846416bbab3c6d2e5d85d493f47b31\": rpc error: code = NotFound desc = could not find container \"ad3a62409c7407eaaeb04fcb6664eba16d846416bbab3c6d2e5d85d493f47b31\": container with ID starting with ad3a62409c7407eaaeb04fcb6664eba16d846416bbab3c6d2e5d85d493f47b31 not found: ID does not exist" Dec 13 03:31:04 crc kubenswrapper[5070]: I1213 03:31:04.782952 5070 scope.go:117] "RemoveContainer" containerID="0b5e604e952336b9dec754489e27bf80fac37ce22898a7b62247114559f8d155" Dec 13 03:31:04 crc kubenswrapper[5070]: I1213 03:31:04.783129 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0b5e604e952336b9dec754489e27bf80fac37ce22898a7b62247114559f8d155"} err="failed to get container status \"0b5e604e952336b9dec754489e27bf80fac37ce22898a7b62247114559f8d155\": rpc error: code = NotFound desc = could not find container \"0b5e604e952336b9dec754489e27bf80fac37ce22898a7b62247114559f8d155\": container with ID starting with 0b5e604e952336b9dec754489e27bf80fac37ce22898a7b62247114559f8d155 not found: ID does not exist" Dec 13 03:31:04 crc kubenswrapper[5070]: I1213 03:31:04.783150 5070 scope.go:117] "RemoveContainer" containerID="a1b86746aa66c80cbe9300c22a993beb387abe2462d29ef76aaf244a3bea62e7" Dec 13 03:31:04 crc kubenswrapper[5070]: I1213 03:31:04.783356 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a1b86746aa66c80cbe9300c22a993beb387abe2462d29ef76aaf244a3bea62e7"} err="failed to get container status \"a1b86746aa66c80cbe9300c22a993beb387abe2462d29ef76aaf244a3bea62e7\": rpc error: code = NotFound desc = could not find container \"a1b86746aa66c80cbe9300c22a993beb387abe2462d29ef76aaf244a3bea62e7\": container with ID starting with a1b86746aa66c80cbe9300c22a993beb387abe2462d29ef76aaf244a3bea62e7 not found: ID does not exist" Dec 13 03:31:04 crc kubenswrapper[5070]: I1213 03:31:04.783379 5070 scope.go:117] "RemoveContainer" containerID="5a1d37d5465eb72652deebdf56f46bcbc07c3d90fbfdeaa9e19976a35200e835" Dec 13 03:31:04 crc kubenswrapper[5070]: I1213 03:31:04.783625 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5a1d37d5465eb72652deebdf56f46bcbc07c3d90fbfdeaa9e19976a35200e835"} err="failed to get container status \"5a1d37d5465eb72652deebdf56f46bcbc07c3d90fbfdeaa9e19976a35200e835\": rpc error: code = NotFound desc = could not find container \"5a1d37d5465eb72652deebdf56f46bcbc07c3d90fbfdeaa9e19976a35200e835\": container with ID starting with 5a1d37d5465eb72652deebdf56f46bcbc07c3d90fbfdeaa9e19976a35200e835 not found: ID does not exist" Dec 13 03:31:04 crc kubenswrapper[5070]: I1213 03:31:04.783650 5070 scope.go:117] "RemoveContainer" containerID="ad3a62409c7407eaaeb04fcb6664eba16d846416bbab3c6d2e5d85d493f47b31" Dec 13 03:31:04 crc kubenswrapper[5070]: I1213 03:31:04.783929 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ad3a62409c7407eaaeb04fcb6664eba16d846416bbab3c6d2e5d85d493f47b31"} err="failed to get container status \"ad3a62409c7407eaaeb04fcb6664eba16d846416bbab3c6d2e5d85d493f47b31\": rpc error: code = NotFound desc = could not find container \"ad3a62409c7407eaaeb04fcb6664eba16d846416bbab3c6d2e5d85d493f47b31\": container with ID starting with ad3a62409c7407eaaeb04fcb6664eba16d846416bbab3c6d2e5d85d493f47b31 not found: ID does not exist" Dec 13 03:31:04 crc kubenswrapper[5070]: I1213 03:31:04.783960 5070 scope.go:117] "RemoveContainer" 
containerID="0b5e604e952336b9dec754489e27bf80fac37ce22898a7b62247114559f8d155" Dec 13 03:31:04 crc kubenswrapper[5070]: I1213 03:31:04.784244 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0b5e604e952336b9dec754489e27bf80fac37ce22898a7b62247114559f8d155"} err="failed to get container status \"0b5e604e952336b9dec754489e27bf80fac37ce22898a7b62247114559f8d155\": rpc error: code = NotFound desc = could not find container \"0b5e604e952336b9dec754489e27bf80fac37ce22898a7b62247114559f8d155\": container with ID starting with 0b5e604e952336b9dec754489e27bf80fac37ce22898a7b62247114559f8d155 not found: ID does not exist" Dec 13 03:31:04 crc kubenswrapper[5070]: I1213 03:31:04.784265 5070 scope.go:117] "RemoveContainer" containerID="a1b86746aa66c80cbe9300c22a993beb387abe2462d29ef76aaf244a3bea62e7" Dec 13 03:31:04 crc kubenswrapper[5070]: I1213 03:31:04.784570 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a1b86746aa66c80cbe9300c22a993beb387abe2462d29ef76aaf244a3bea62e7"} err="failed to get container status \"a1b86746aa66c80cbe9300c22a993beb387abe2462d29ef76aaf244a3bea62e7\": rpc error: code = NotFound desc = could not find container \"a1b86746aa66c80cbe9300c22a993beb387abe2462d29ef76aaf244a3bea62e7\": container with ID starting with a1b86746aa66c80cbe9300c22a993beb387abe2462d29ef76aaf244a3bea62e7 not found: ID does not exist" Dec 13 03:31:04 crc kubenswrapper[5070]: I1213 03:31:04.805778 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/40f75e9d-9b19-4652-9aad-bc38e0d0eca4-scripts\") pod \"ceilometer-0\" (UID: \"40f75e9d-9b19-4652-9aad-bc38e0d0eca4\") " pod="openstack/ceilometer-0" Dec 13 03:31:04 crc kubenswrapper[5070]: I1213 03:31:04.805852 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40f75e9d-9b19-4652-9aad-bc38e0d0eca4-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"40f75e9d-9b19-4652-9aad-bc38e0d0eca4\") " pod="openstack/ceilometer-0" Dec 13 03:31:04 crc kubenswrapper[5070]: I1213 03:31:04.805923 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/40f75e9d-9b19-4652-9aad-bc38e0d0eca4-log-httpd\") pod \"ceilometer-0\" (UID: \"40f75e9d-9b19-4652-9aad-bc38e0d0eca4\") " pod="openstack/ceilometer-0" Dec 13 03:31:04 crc kubenswrapper[5070]: I1213 03:31:04.805982 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ss2x4\" (UniqueName: \"kubernetes.io/projected/40f75e9d-9b19-4652-9aad-bc38e0d0eca4-kube-api-access-ss2x4\") pod \"ceilometer-0\" (UID: \"40f75e9d-9b19-4652-9aad-bc38e0d0eca4\") " pod="openstack/ceilometer-0" Dec 13 03:31:04 crc kubenswrapper[5070]: I1213 03:31:04.805998 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/40f75e9d-9b19-4652-9aad-bc38e0d0eca4-run-httpd\") pod \"ceilometer-0\" (UID: \"40f75e9d-9b19-4652-9aad-bc38e0d0eca4\") " pod="openstack/ceilometer-0" Dec 13 03:31:04 crc kubenswrapper[5070]: I1213 03:31:04.806044 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: 
\"kubernetes.io/secret/40f75e9d-9b19-4652-9aad-bc38e0d0eca4-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"40f75e9d-9b19-4652-9aad-bc38e0d0eca4\") " pod="openstack/ceilometer-0" Dec 13 03:31:04 crc kubenswrapper[5070]: I1213 03:31:04.806120 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/40f75e9d-9b19-4652-9aad-bc38e0d0eca4-config-data\") pod \"ceilometer-0\" (UID: \"40f75e9d-9b19-4652-9aad-bc38e0d0eca4\") " pod="openstack/ceilometer-0" Dec 13 03:31:04 crc kubenswrapper[5070]: I1213 03:31:04.907345 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40f75e9d-9b19-4652-9aad-bc38e0d0eca4-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"40f75e9d-9b19-4652-9aad-bc38e0d0eca4\") " pod="openstack/ceilometer-0" Dec 13 03:31:04 crc kubenswrapper[5070]: I1213 03:31:04.907419 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/40f75e9d-9b19-4652-9aad-bc38e0d0eca4-log-httpd\") pod \"ceilometer-0\" (UID: \"40f75e9d-9b19-4652-9aad-bc38e0d0eca4\") " pod="openstack/ceilometer-0" Dec 13 03:31:04 crc kubenswrapper[5070]: I1213 03:31:04.907479 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ss2x4\" (UniqueName: \"kubernetes.io/projected/40f75e9d-9b19-4652-9aad-bc38e0d0eca4-kube-api-access-ss2x4\") pod \"ceilometer-0\" (UID: \"40f75e9d-9b19-4652-9aad-bc38e0d0eca4\") " pod="openstack/ceilometer-0" Dec 13 03:31:04 crc kubenswrapper[5070]: I1213 03:31:04.907499 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/40f75e9d-9b19-4652-9aad-bc38e0d0eca4-run-httpd\") pod \"ceilometer-0\" (UID: \"40f75e9d-9b19-4652-9aad-bc38e0d0eca4\") " pod="openstack/ceilometer-0" Dec 13 03:31:04 crc kubenswrapper[5070]: I1213 03:31:04.907539 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/40f75e9d-9b19-4652-9aad-bc38e0d0eca4-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"40f75e9d-9b19-4652-9aad-bc38e0d0eca4\") " pod="openstack/ceilometer-0" Dec 13 03:31:04 crc kubenswrapper[5070]: I1213 03:31:04.907593 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/40f75e9d-9b19-4652-9aad-bc38e0d0eca4-config-data\") pod \"ceilometer-0\" (UID: \"40f75e9d-9b19-4652-9aad-bc38e0d0eca4\") " pod="openstack/ceilometer-0" Dec 13 03:31:04 crc kubenswrapper[5070]: I1213 03:31:04.907653 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/40f75e9d-9b19-4652-9aad-bc38e0d0eca4-scripts\") pod \"ceilometer-0\" (UID: \"40f75e9d-9b19-4652-9aad-bc38e0d0eca4\") " pod="openstack/ceilometer-0" Dec 13 03:31:04 crc kubenswrapper[5070]: I1213 03:31:04.914944 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/40f75e9d-9b19-4652-9aad-bc38e0d0eca4-run-httpd\") pod \"ceilometer-0\" (UID: \"40f75e9d-9b19-4652-9aad-bc38e0d0eca4\") " pod="openstack/ceilometer-0" Dec 13 03:31:04 crc kubenswrapper[5070]: I1213 03:31:04.915501 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/40f75e9d-9b19-4652-9aad-bc38e0d0eca4-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"40f75e9d-9b19-4652-9aad-bc38e0d0eca4\") " pod="openstack/ceilometer-0" Dec 13 03:31:04 crc kubenswrapper[5070]: I1213 03:31:04.920278 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/40f75e9d-9b19-4652-9aad-bc38e0d0eca4-log-httpd\") pod \"ceilometer-0\" (UID: \"40f75e9d-9b19-4652-9aad-bc38e0d0eca4\") " pod="openstack/ceilometer-0" Dec 13 03:31:04 crc kubenswrapper[5070]: I1213 03:31:04.922352 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/40f75e9d-9b19-4652-9aad-bc38e0d0eca4-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"40f75e9d-9b19-4652-9aad-bc38e0d0eca4\") " pod="openstack/ceilometer-0" Dec 13 03:31:04 crc kubenswrapper[5070]: I1213 03:31:04.934272 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/40f75e9d-9b19-4652-9aad-bc38e0d0eca4-config-data\") pod \"ceilometer-0\" (UID: \"40f75e9d-9b19-4652-9aad-bc38e0d0eca4\") " pod="openstack/ceilometer-0" Dec 13 03:31:04 crc kubenswrapper[5070]: I1213 03:31:04.947087 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ss2x4\" (UniqueName: \"kubernetes.io/projected/40f75e9d-9b19-4652-9aad-bc38e0d0eca4-kube-api-access-ss2x4\") pod \"ceilometer-0\" (UID: \"40f75e9d-9b19-4652-9aad-bc38e0d0eca4\") " pod="openstack/ceilometer-0" Dec 13 03:31:04 crc kubenswrapper[5070]: I1213 03:31:04.951976 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/40f75e9d-9b19-4652-9aad-bc38e0d0eca4-scripts\") pod \"ceilometer-0\" (UID: \"40f75e9d-9b19-4652-9aad-bc38e0d0eca4\") " pod="openstack/ceilometer-0" Dec 13 03:31:05 crc kubenswrapper[5070]: I1213 03:31:05.009043 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 13 03:31:05 crc kubenswrapper[5070]: I1213 03:31:05.109196 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-e920-account-create-c57q5" Dec 13 03:31:05 crc kubenswrapper[5070]: I1213 03:31:05.119916 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-0b75-account-create-f5fw5" Dec 13 03:31:05 crc kubenswrapper[5070]: I1213 03:31:05.211607 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l88vx\" (UniqueName: \"kubernetes.io/projected/d30e4476-3bb4-4ac3-b30f-67d2900ab0c1-kube-api-access-l88vx\") pod \"d30e4476-3bb4-4ac3-b30f-67d2900ab0c1\" (UID: \"d30e4476-3bb4-4ac3-b30f-67d2900ab0c1\") " Dec 13 03:31:05 crc kubenswrapper[5070]: I1213 03:31:05.211706 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mx7vd\" (UniqueName: \"kubernetes.io/projected/528f5eba-2b8a-478c-a09f-6e97729d2b31-kube-api-access-mx7vd\") pod \"528f5eba-2b8a-478c-a09f-6e97729d2b31\" (UID: \"528f5eba-2b8a-478c-a09f-6e97729d2b31\") " Dec 13 03:31:05 crc kubenswrapper[5070]: I1213 03:31:05.216690 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/528f5eba-2b8a-478c-a09f-6e97729d2b31-kube-api-access-mx7vd" (OuterVolumeSpecName: "kube-api-access-mx7vd") pod "528f5eba-2b8a-478c-a09f-6e97729d2b31" (UID: "528f5eba-2b8a-478c-a09f-6e97729d2b31"). InnerVolumeSpecName "kube-api-access-mx7vd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:31:05 crc kubenswrapper[5070]: I1213 03:31:05.217824 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d30e4476-3bb4-4ac3-b30f-67d2900ab0c1-kube-api-access-l88vx" (OuterVolumeSpecName: "kube-api-access-l88vx") pod "d30e4476-3bb4-4ac3-b30f-67d2900ab0c1" (UID: "d30e4476-3bb4-4ac3-b30f-67d2900ab0c1"). InnerVolumeSpecName "kube-api-access-l88vx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:31:05 crc kubenswrapper[5070]: I1213 03:31:05.314059 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l88vx\" (UniqueName: \"kubernetes.io/projected/d30e4476-3bb4-4ac3-b30f-67d2900ab0c1-kube-api-access-l88vx\") on node \"crc\" DevicePath \"\"" Dec 13 03:31:05 crc kubenswrapper[5070]: I1213 03:31:05.314108 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mx7vd\" (UniqueName: \"kubernetes.io/projected/528f5eba-2b8a-478c-a09f-6e97729d2b31-kube-api-access-mx7vd\") on node \"crc\" DevicePath \"\"" Dec 13 03:31:05 crc kubenswrapper[5070]: I1213 03:31:05.441299 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 13 03:31:05 crc kubenswrapper[5070]: W1213 03:31:05.443737 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod40f75e9d_9b19_4652_9aad_bc38e0d0eca4.slice/crio-4abcc88fe50d758d0b7467b3ad9b9363825fa6b5fb42c739f52d5b7998746391 WatchSource:0}: Error finding container 4abcc88fe50d758d0b7467b3ad9b9363825fa6b5fb42c739f52d5b7998746391: Status 404 returned error can't find the container with id 4abcc88fe50d758d0b7467b3ad9b9363825fa6b5fb42c739f52d5b7998746391 Dec 13 03:31:05 crc kubenswrapper[5070]: I1213 03:31:05.602536 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-0b75-account-create-f5fw5" event={"ID":"d30e4476-3bb4-4ac3-b30f-67d2900ab0c1","Type":"ContainerDied","Data":"68069a2adb9fd9f36f386a380dc56bd2b2b7780d79cc28c141547ca0c748b345"} Dec 13 03:31:05 crc kubenswrapper[5070]: I1213 03:31:05.602871 5070 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="68069a2adb9fd9f36f386a380dc56bd2b2b7780d79cc28c141547ca0c748b345" Dec 13 03:31:05 crc 
kubenswrapper[5070]: I1213 03:31:05.602587 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-0b75-account-create-f5fw5" Dec 13 03:31:05 crc kubenswrapper[5070]: I1213 03:31:05.607228 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"40f75e9d-9b19-4652-9aad-bc38e0d0eca4","Type":"ContainerStarted","Data":"4abcc88fe50d758d0b7467b3ad9b9363825fa6b5fb42c739f52d5b7998746391"} Dec 13 03:31:05 crc kubenswrapper[5070]: I1213 03:31:05.610420 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-e920-account-create-c57q5" event={"ID":"528f5eba-2b8a-478c-a09f-6e97729d2b31","Type":"ContainerDied","Data":"8f5370850edf468ef6d64490a98336cc10d917cd176b3517b26315acb1471633"} Dec 13 03:31:05 crc kubenswrapper[5070]: I1213 03:31:05.610535 5070 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8f5370850edf468ef6d64490a98336cc10d917cd176b3517b26315acb1471633" Dec 13 03:31:05 crc kubenswrapper[5070]: I1213 03:31:05.610619 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-e920-account-create-c57q5" Dec 13 03:31:06 crc kubenswrapper[5070]: I1213 03:31:06.177112 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3892d787-b7e9-4f06-b629-7c30653b7a43" path="/var/lib/kubelet/pods/3892d787-b7e9-4f06-b629-7c30653b7a43/volumes" Dec 13 03:31:06 crc kubenswrapper[5070]: I1213 03:31:06.623572 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"40f75e9d-9b19-4652-9aad-bc38e0d0eca4","Type":"ContainerStarted","Data":"77442c30b04c2458e8d0c0aecfa4f34a8986aa781474c22daee8d0c77597c0a4"} Dec 13 03:31:06 crc kubenswrapper[5070]: I1213 03:31:06.628673 5070 generic.go:334] "Generic (PLEG): container finished" podID="4b4c78b2-a38d-476f-b88a-089e89d92fef" containerID="fda161a35f795c76afb7c65f4274c50e905d564b116fd577ffd4546f835f8039" exitCode=0 Dec 13 03:31:06 crc kubenswrapper[5070]: I1213 03:31:06.628752 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"4b4c78b2-a38d-476f-b88a-089e89d92fef","Type":"ContainerDied","Data":"fda161a35f795c76afb7c65f4274c50e905d564b116fd577ffd4546f835f8039"} Dec 13 03:31:06 crc kubenswrapper[5070]: I1213 03:31:06.927494 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-db-sync-l8q4l"] Dec 13 03:31:06 crc kubenswrapper[5070]: E1213 03:31:06.936372 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="528f5eba-2b8a-478c-a09f-6e97729d2b31" containerName="mariadb-account-create" Dec 13 03:31:06 crc kubenswrapper[5070]: I1213 03:31:06.936529 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="528f5eba-2b8a-478c-a09f-6e97729d2b31" containerName="mariadb-account-create" Dec 13 03:31:06 crc kubenswrapper[5070]: E1213 03:31:06.936616 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d30e4476-3bb4-4ac3-b30f-67d2900ab0c1" containerName="mariadb-account-create" Dec 13 03:31:06 crc kubenswrapper[5070]: I1213 03:31:06.936681 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="d30e4476-3bb4-4ac3-b30f-67d2900ab0c1" containerName="mariadb-account-create" Dec 13 03:31:06 crc kubenswrapper[5070]: I1213 03:31:06.936964 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="d30e4476-3bb4-4ac3-b30f-67d2900ab0c1" containerName="mariadb-account-create" Dec 13 03:31:06 crc 
kubenswrapper[5070]: I1213 03:31:06.937044 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="528f5eba-2b8a-478c-a09f-6e97729d2b31" containerName="mariadb-account-create" Dec 13 03:31:06 crc kubenswrapper[5070]: I1213 03:31:06.937768 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-l8q4l" Dec 13 03:31:06 crc kubenswrapper[5070]: I1213 03:31:06.942913 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-scripts" Dec 13 03:31:06 crc kubenswrapper[5070]: I1213 03:31:06.942972 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Dec 13 03:31:06 crc kubenswrapper[5070]: I1213 03:31:06.943339 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-c2s4w" Dec 13 03:31:06 crc kubenswrapper[5070]: I1213 03:31:06.953564 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-l8q4l"] Dec 13 03:31:07 crc kubenswrapper[5070]: I1213 03:31:07.043835 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7d28fde7-7ce2-4aa6-9412-dae4f27cb494-scripts\") pod \"nova-cell0-conductor-db-sync-l8q4l\" (UID: \"7d28fde7-7ce2-4aa6-9412-dae4f27cb494\") " pod="openstack/nova-cell0-conductor-db-sync-l8q4l" Dec 13 03:31:07 crc kubenswrapper[5070]: I1213 03:31:07.043957 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7d28fde7-7ce2-4aa6-9412-dae4f27cb494-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-l8q4l\" (UID: \"7d28fde7-7ce2-4aa6-9412-dae4f27cb494\") " pod="openstack/nova-cell0-conductor-db-sync-l8q4l" Dec 13 03:31:07 crc kubenswrapper[5070]: I1213 03:31:07.043994 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7d28fde7-7ce2-4aa6-9412-dae4f27cb494-config-data\") pod \"nova-cell0-conductor-db-sync-l8q4l\" (UID: \"7d28fde7-7ce2-4aa6-9412-dae4f27cb494\") " pod="openstack/nova-cell0-conductor-db-sync-l8q4l" Dec 13 03:31:07 crc kubenswrapper[5070]: I1213 03:31:07.044043 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6qdlz\" (UniqueName: \"kubernetes.io/projected/7d28fde7-7ce2-4aa6-9412-dae4f27cb494-kube-api-access-6qdlz\") pod \"nova-cell0-conductor-db-sync-l8q4l\" (UID: \"7d28fde7-7ce2-4aa6-9412-dae4f27cb494\") " pod="openstack/nova-cell0-conductor-db-sync-l8q4l" Dec 13 03:31:07 crc kubenswrapper[5070]: I1213 03:31:07.147057 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7d28fde7-7ce2-4aa6-9412-dae4f27cb494-scripts\") pod \"nova-cell0-conductor-db-sync-l8q4l\" (UID: \"7d28fde7-7ce2-4aa6-9412-dae4f27cb494\") " pod="openstack/nova-cell0-conductor-db-sync-l8q4l" Dec 13 03:31:07 crc kubenswrapper[5070]: I1213 03:31:07.147544 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7d28fde7-7ce2-4aa6-9412-dae4f27cb494-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-l8q4l\" (UID: \"7d28fde7-7ce2-4aa6-9412-dae4f27cb494\") " pod="openstack/nova-cell0-conductor-db-sync-l8q4l" Dec 13 03:31:07 crc 
kubenswrapper[5070]: I1213 03:31:07.147589 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7d28fde7-7ce2-4aa6-9412-dae4f27cb494-config-data\") pod \"nova-cell0-conductor-db-sync-l8q4l\" (UID: \"7d28fde7-7ce2-4aa6-9412-dae4f27cb494\") " pod="openstack/nova-cell0-conductor-db-sync-l8q4l" Dec 13 03:31:07 crc kubenswrapper[5070]: I1213 03:31:07.147646 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6qdlz\" (UniqueName: \"kubernetes.io/projected/7d28fde7-7ce2-4aa6-9412-dae4f27cb494-kube-api-access-6qdlz\") pod \"nova-cell0-conductor-db-sync-l8q4l\" (UID: \"7d28fde7-7ce2-4aa6-9412-dae4f27cb494\") " pod="openstack/nova-cell0-conductor-db-sync-l8q4l" Dec 13 03:31:07 crc kubenswrapper[5070]: I1213 03:31:07.153682 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7d28fde7-7ce2-4aa6-9412-dae4f27cb494-scripts\") pod \"nova-cell0-conductor-db-sync-l8q4l\" (UID: \"7d28fde7-7ce2-4aa6-9412-dae4f27cb494\") " pod="openstack/nova-cell0-conductor-db-sync-l8q4l" Dec 13 03:31:07 crc kubenswrapper[5070]: I1213 03:31:07.155279 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7d28fde7-7ce2-4aa6-9412-dae4f27cb494-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-l8q4l\" (UID: \"7d28fde7-7ce2-4aa6-9412-dae4f27cb494\") " pod="openstack/nova-cell0-conductor-db-sync-l8q4l" Dec 13 03:31:07 crc kubenswrapper[5070]: I1213 03:31:07.156378 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7d28fde7-7ce2-4aa6-9412-dae4f27cb494-config-data\") pod \"nova-cell0-conductor-db-sync-l8q4l\" (UID: \"7d28fde7-7ce2-4aa6-9412-dae4f27cb494\") " pod="openstack/nova-cell0-conductor-db-sync-l8q4l" Dec 13 03:31:07 crc kubenswrapper[5070]: I1213 03:31:07.178157 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6qdlz\" (UniqueName: \"kubernetes.io/projected/7d28fde7-7ce2-4aa6-9412-dae4f27cb494-kube-api-access-6qdlz\") pod \"nova-cell0-conductor-db-sync-l8q4l\" (UID: \"7d28fde7-7ce2-4aa6-9412-dae4f27cb494\") " pod="openstack/nova-cell0-conductor-db-sync-l8q4l" Dec 13 03:31:07 crc kubenswrapper[5070]: I1213 03:31:07.258283 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-l8q4l" Dec 13 03:31:07 crc kubenswrapper[5070]: I1213 03:31:07.401162 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 13 03:31:07 crc kubenswrapper[5070]: I1213 03:31:07.453297 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4b4c78b2-a38d-476f-b88a-089e89d92fef-config-data-custom\") pod \"4b4c78b2-a38d-476f-b88a-089e89d92fef\" (UID: \"4b4c78b2-a38d-476f-b88a-089e89d92fef\") " Dec 13 03:31:07 crc kubenswrapper[5070]: I1213 03:31:07.453359 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/4b4c78b2-a38d-476f-b88a-089e89d92fef-etc-machine-id\") pod \"4b4c78b2-a38d-476f-b88a-089e89d92fef\" (UID: \"4b4c78b2-a38d-476f-b88a-089e89d92fef\") " Dec 13 03:31:07 crc kubenswrapper[5070]: I1213 03:31:07.453382 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k7sp8\" (UniqueName: \"kubernetes.io/projected/4b4c78b2-a38d-476f-b88a-089e89d92fef-kube-api-access-k7sp8\") pod \"4b4c78b2-a38d-476f-b88a-089e89d92fef\" (UID: \"4b4c78b2-a38d-476f-b88a-089e89d92fef\") " Dec 13 03:31:07 crc kubenswrapper[5070]: I1213 03:31:07.453421 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4b4c78b2-a38d-476f-b88a-089e89d92fef-config-data\") pod \"4b4c78b2-a38d-476f-b88a-089e89d92fef\" (UID: \"4b4c78b2-a38d-476f-b88a-089e89d92fef\") " Dec 13 03:31:07 crc kubenswrapper[5070]: I1213 03:31:07.453488 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4b4c78b2-a38d-476f-b88a-089e89d92fef-scripts\") pod \"4b4c78b2-a38d-476f-b88a-089e89d92fef\" (UID: \"4b4c78b2-a38d-476f-b88a-089e89d92fef\") " Dec 13 03:31:07 crc kubenswrapper[5070]: I1213 03:31:07.453615 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4b4c78b2-a38d-476f-b88a-089e89d92fef-combined-ca-bundle\") pod \"4b4c78b2-a38d-476f-b88a-089e89d92fef\" (UID: \"4b4c78b2-a38d-476f-b88a-089e89d92fef\") " Dec 13 03:31:07 crc kubenswrapper[5070]: I1213 03:31:07.457619 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4b4c78b2-a38d-476f-b88a-089e89d92fef-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "4b4c78b2-a38d-476f-b88a-089e89d92fef" (UID: "4b4c78b2-a38d-476f-b88a-089e89d92fef"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 13 03:31:07 crc kubenswrapper[5070]: I1213 03:31:07.472924 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4b4c78b2-a38d-476f-b88a-089e89d92fef-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "4b4c78b2-a38d-476f-b88a-089e89d92fef" (UID: "4b4c78b2-a38d-476f-b88a-089e89d92fef"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:31:07 crc kubenswrapper[5070]: I1213 03:31:07.477249 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4b4c78b2-a38d-476f-b88a-089e89d92fef-kube-api-access-k7sp8" (OuterVolumeSpecName: "kube-api-access-k7sp8") pod "4b4c78b2-a38d-476f-b88a-089e89d92fef" (UID: "4b4c78b2-a38d-476f-b88a-089e89d92fef"). InnerVolumeSpecName "kube-api-access-k7sp8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:31:07 crc kubenswrapper[5070]: I1213 03:31:07.480992 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4b4c78b2-a38d-476f-b88a-089e89d92fef-scripts" (OuterVolumeSpecName: "scripts") pod "4b4c78b2-a38d-476f-b88a-089e89d92fef" (UID: "4b4c78b2-a38d-476f-b88a-089e89d92fef"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:31:07 crc kubenswrapper[5070]: I1213 03:31:07.561821 5070 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4b4c78b2-a38d-476f-b88a-089e89d92fef-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 13 03:31:07 crc kubenswrapper[5070]: I1213 03:31:07.561869 5070 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/4b4c78b2-a38d-476f-b88a-089e89d92fef-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 13 03:31:07 crc kubenswrapper[5070]: I1213 03:31:07.561895 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k7sp8\" (UniqueName: \"kubernetes.io/projected/4b4c78b2-a38d-476f-b88a-089e89d92fef-kube-api-access-k7sp8\") on node \"crc\" DevicePath \"\"" Dec 13 03:31:07 crc kubenswrapper[5070]: I1213 03:31:07.561908 5070 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4b4c78b2-a38d-476f-b88a-089e89d92fef-scripts\") on node \"crc\" DevicePath \"\"" Dec 13 03:31:07 crc kubenswrapper[5070]: I1213 03:31:07.573634 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4b4c78b2-a38d-476f-b88a-089e89d92fef-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4b4c78b2-a38d-476f-b88a-089e89d92fef" (UID: "4b4c78b2-a38d-476f-b88a-089e89d92fef"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:31:07 crc kubenswrapper[5070]: I1213 03:31:07.633003 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4b4c78b2-a38d-476f-b88a-089e89d92fef-config-data" (OuterVolumeSpecName: "config-data") pod "4b4c78b2-a38d-476f-b88a-089e89d92fef" (UID: "4b4c78b2-a38d-476f-b88a-089e89d92fef"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:31:07 crc kubenswrapper[5070]: I1213 03:31:07.647879 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"40f75e9d-9b19-4652-9aad-bc38e0d0eca4","Type":"ContainerStarted","Data":"0b793b708934ac93206d0055eb3842da54308594aee82debcf3a6a805f972595"} Dec 13 03:31:07 crc kubenswrapper[5070]: I1213 03:31:07.655910 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"4b4c78b2-a38d-476f-b88a-089e89d92fef","Type":"ContainerDied","Data":"9ae36b0518206e635e8f6bf728e691724e7c99a9567f39e34ceeb3f84aaf75d4"} Dec 13 03:31:07 crc kubenswrapper[5070]: I1213 03:31:07.655962 5070 scope.go:117] "RemoveContainer" containerID="d8c0b2df5788ed9d47f087efae84629420a6cf1956558ddb519c6b2f971f72f2" Dec 13 03:31:07 crc kubenswrapper[5070]: I1213 03:31:07.656114 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 13 03:31:07 crc kubenswrapper[5070]: I1213 03:31:07.668222 5070 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4b4c78b2-a38d-476f-b88a-089e89d92fef-config-data\") on node \"crc\" DevicePath \"\"" Dec 13 03:31:07 crc kubenswrapper[5070]: I1213 03:31:07.668271 5070 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4b4c78b2-a38d-476f-b88a-089e89d92fef-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 03:31:07 crc kubenswrapper[5070]: I1213 03:31:07.706664 5070 scope.go:117] "RemoveContainer" containerID="fda161a35f795c76afb7c65f4274c50e905d564b116fd577ffd4546f835f8039" Dec 13 03:31:07 crc kubenswrapper[5070]: I1213 03:31:07.709516 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 13 03:31:07 crc kubenswrapper[5070]: I1213 03:31:07.744531 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 13 03:31:07 crc kubenswrapper[5070]: I1213 03:31:07.760486 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Dec 13 03:31:07 crc kubenswrapper[5070]: E1213 03:31:07.761243 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4b4c78b2-a38d-476f-b88a-089e89d92fef" containerName="cinder-scheduler" Dec 13 03:31:07 crc kubenswrapper[5070]: I1213 03:31:07.761261 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="4b4c78b2-a38d-476f-b88a-089e89d92fef" containerName="cinder-scheduler" Dec 13 03:31:07 crc kubenswrapper[5070]: E1213 03:31:07.761288 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4b4c78b2-a38d-476f-b88a-089e89d92fef" containerName="probe" Dec 13 03:31:07 crc kubenswrapper[5070]: I1213 03:31:07.761295 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="4b4c78b2-a38d-476f-b88a-089e89d92fef" containerName="probe" Dec 13 03:31:07 crc kubenswrapper[5070]: I1213 03:31:07.761493 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="4b4c78b2-a38d-476f-b88a-089e89d92fef" containerName="probe" Dec 13 03:31:07 crc kubenswrapper[5070]: I1213 03:31:07.761504 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="4b4c78b2-a38d-476f-b88a-089e89d92fef" containerName="cinder-scheduler" Dec 13 03:31:07 crc kubenswrapper[5070]: I1213 03:31:07.762760 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 13 03:31:07 crc kubenswrapper[5070]: I1213 03:31:07.767118 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Dec 13 03:31:07 crc kubenswrapper[5070]: I1213 03:31:07.782732 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 13 03:31:07 crc kubenswrapper[5070]: I1213 03:31:07.871311 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e4b6c4bc-0c14-4506-944f-fea34a040871-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"e4b6c4bc-0c14-4506-944f-fea34a040871\") " pod="openstack/cinder-scheduler-0" Dec 13 03:31:07 crc kubenswrapper[5070]: I1213 03:31:07.871612 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/e4b6c4bc-0c14-4506-944f-fea34a040871-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"e4b6c4bc-0c14-4506-944f-fea34a040871\") " pod="openstack/cinder-scheduler-0" Dec 13 03:31:07 crc kubenswrapper[5070]: I1213 03:31:07.871789 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zpxcd\" (UniqueName: \"kubernetes.io/projected/e4b6c4bc-0c14-4506-944f-fea34a040871-kube-api-access-zpxcd\") pod \"cinder-scheduler-0\" (UID: \"e4b6c4bc-0c14-4506-944f-fea34a040871\") " pod="openstack/cinder-scheduler-0" Dec 13 03:31:07 crc kubenswrapper[5070]: I1213 03:31:07.871824 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e4b6c4bc-0c14-4506-944f-fea34a040871-config-data\") pod \"cinder-scheduler-0\" (UID: \"e4b6c4bc-0c14-4506-944f-fea34a040871\") " pod="openstack/cinder-scheduler-0" Dec 13 03:31:07 crc kubenswrapper[5070]: I1213 03:31:07.871969 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e4b6c4bc-0c14-4506-944f-fea34a040871-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"e4b6c4bc-0c14-4506-944f-fea34a040871\") " pod="openstack/cinder-scheduler-0" Dec 13 03:31:07 crc kubenswrapper[5070]: I1213 03:31:07.871990 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e4b6c4bc-0c14-4506-944f-fea34a040871-scripts\") pod \"cinder-scheduler-0\" (UID: \"e4b6c4bc-0c14-4506-944f-fea34a040871\") " pod="openstack/cinder-scheduler-0" Dec 13 03:31:07 crc kubenswrapper[5070]: W1213 03:31:07.876094 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7d28fde7_7ce2_4aa6_9412_dae4f27cb494.slice/crio-d8be31cb3032d248e3b88ce50dc86123f67153c51d7a4d0c15dc6415ab3fa043 WatchSource:0}: Error finding container d8be31cb3032d248e3b88ce50dc86123f67153c51d7a4d0c15dc6415ab3fa043: Status 404 returned error can't find the container with id d8be31cb3032d248e3b88ce50dc86123f67153c51d7a4d0c15dc6415ab3fa043 Dec 13 03:31:07 crc kubenswrapper[5070]: I1213 03:31:07.883590 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-l8q4l"] Dec 13 03:31:07 crc kubenswrapper[5070]: I1213 03:31:07.973576 5070 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/e4b6c4bc-0c14-4506-944f-fea34a040871-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"e4b6c4bc-0c14-4506-944f-fea34a040871\") " pod="openstack/cinder-scheduler-0" Dec 13 03:31:07 crc kubenswrapper[5070]: I1213 03:31:07.973668 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zpxcd\" (UniqueName: \"kubernetes.io/projected/e4b6c4bc-0c14-4506-944f-fea34a040871-kube-api-access-zpxcd\") pod \"cinder-scheduler-0\" (UID: \"e4b6c4bc-0c14-4506-944f-fea34a040871\") " pod="openstack/cinder-scheduler-0" Dec 13 03:31:07 crc kubenswrapper[5070]: I1213 03:31:07.973688 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/e4b6c4bc-0c14-4506-944f-fea34a040871-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"e4b6c4bc-0c14-4506-944f-fea34a040871\") " pod="openstack/cinder-scheduler-0" Dec 13 03:31:07 crc kubenswrapper[5070]: I1213 03:31:07.973695 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e4b6c4bc-0c14-4506-944f-fea34a040871-config-data\") pod \"cinder-scheduler-0\" (UID: \"e4b6c4bc-0c14-4506-944f-fea34a040871\") " pod="openstack/cinder-scheduler-0" Dec 13 03:31:07 crc kubenswrapper[5070]: I1213 03:31:07.973879 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e4b6c4bc-0c14-4506-944f-fea34a040871-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"e4b6c4bc-0c14-4506-944f-fea34a040871\") " pod="openstack/cinder-scheduler-0" Dec 13 03:31:07 crc kubenswrapper[5070]: I1213 03:31:07.973907 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e4b6c4bc-0c14-4506-944f-fea34a040871-scripts\") pod \"cinder-scheduler-0\" (UID: \"e4b6c4bc-0c14-4506-944f-fea34a040871\") " pod="openstack/cinder-scheduler-0" Dec 13 03:31:07 crc kubenswrapper[5070]: I1213 03:31:07.974060 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e4b6c4bc-0c14-4506-944f-fea34a040871-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"e4b6c4bc-0c14-4506-944f-fea34a040871\") " pod="openstack/cinder-scheduler-0" Dec 13 03:31:07 crc kubenswrapper[5070]: I1213 03:31:07.979722 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e4b6c4bc-0c14-4506-944f-fea34a040871-scripts\") pod \"cinder-scheduler-0\" (UID: \"e4b6c4bc-0c14-4506-944f-fea34a040871\") " pod="openstack/cinder-scheduler-0" Dec 13 03:31:07 crc kubenswrapper[5070]: I1213 03:31:07.980589 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e4b6c4bc-0c14-4506-944f-fea34a040871-config-data\") pod \"cinder-scheduler-0\" (UID: \"e4b6c4bc-0c14-4506-944f-fea34a040871\") " pod="openstack/cinder-scheduler-0" Dec 13 03:31:07 crc kubenswrapper[5070]: I1213 03:31:07.980683 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e4b6c4bc-0c14-4506-944f-fea34a040871-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"e4b6c4bc-0c14-4506-944f-fea34a040871\") " pod="openstack/cinder-scheduler-0" Dec 13 03:31:07 crc kubenswrapper[5070]: 
I1213 03:31:07.981636 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e4b6c4bc-0c14-4506-944f-fea34a040871-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"e4b6c4bc-0c14-4506-944f-fea34a040871\") " pod="openstack/cinder-scheduler-0" Dec 13 03:31:07 crc kubenswrapper[5070]: I1213 03:31:07.996136 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zpxcd\" (UniqueName: \"kubernetes.io/projected/e4b6c4bc-0c14-4506-944f-fea34a040871-kube-api-access-zpxcd\") pod \"cinder-scheduler-0\" (UID: \"e4b6c4bc-0c14-4506-944f-fea34a040871\") " pod="openstack/cinder-scheduler-0" Dec 13 03:31:08 crc kubenswrapper[5070]: I1213 03:31:08.090771 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 13 03:31:08 crc kubenswrapper[5070]: I1213 03:31:08.176912 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4b4c78b2-a38d-476f-b88a-089e89d92fef" path="/var/lib/kubelet/pods/4b4c78b2-a38d-476f-b88a-089e89d92fef/volumes" Dec 13 03:31:08 crc kubenswrapper[5070]: I1213 03:31:08.634612 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 13 03:31:08 crc kubenswrapper[5070]: I1213 03:31:08.673494 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-l8q4l" event={"ID":"7d28fde7-7ce2-4aa6-9412-dae4f27cb494","Type":"ContainerStarted","Data":"d8be31cb3032d248e3b88ce50dc86123f67153c51d7a4d0c15dc6415ab3fa043"} Dec 13 03:31:08 crc kubenswrapper[5070]: I1213 03:31:08.709521 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"40f75e9d-9b19-4652-9aad-bc38e0d0eca4","Type":"ContainerStarted","Data":"b9182b73a4a8d5a156c8c4de70f1473d7d7532f8f73fe8c5b585d21077bbeae7"} Dec 13 03:31:08 crc kubenswrapper[5070]: I1213 03:31:08.712650 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"e4b6c4bc-0c14-4506-944f-fea34a040871","Type":"ContainerStarted","Data":"06a2fe49e0cc83b17b7c839106e0acc8df842943fa7c7cd2a26aaa2106ceb635"} Dec 13 03:31:09 crc kubenswrapper[5070]: I1213 03:31:09.390409 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Dec 13 03:31:09 crc kubenswrapper[5070]: I1213 03:31:09.730118 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"e4b6c4bc-0c14-4506-944f-fea34a040871","Type":"ContainerStarted","Data":"740acfe68d84011412cc2c2b24e71472560955b3a0704e0da22fa5af0f1fcd66"} Dec 13 03:31:10 crc kubenswrapper[5070]: I1213 03:31:10.746243 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"40f75e9d-9b19-4652-9aad-bc38e0d0eca4","Type":"ContainerStarted","Data":"acf0527b801e3dff613a7e2cdcf9353780a38aba856ce953df4ff96f6d089719"} Dec 13 03:31:10 crc kubenswrapper[5070]: I1213 03:31:10.746641 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 13 03:31:10 crc kubenswrapper[5070]: I1213 03:31:10.749079 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"e4b6c4bc-0c14-4506-944f-fea34a040871","Type":"ContainerStarted","Data":"bd19424018998730fa1ffd473f684e147490259ac3f8dee6cbb1ed79408bc701"} Dec 13 03:31:10 crc kubenswrapper[5070]: I1213 03:31:10.776102 5070 pod_startup_latency_tracker.go:104] "Observed 
pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.041759459 podStartE2EDuration="6.776082998s" podCreationTimestamp="2025-12-13 03:31:04 +0000 UTC" firstStartedPulling="2025-12-13 03:31:05.445780894 +0000 UTC m=+1157.681624440" lastFinishedPulling="2025-12-13 03:31:10.180104433 +0000 UTC m=+1162.415947979" observedRunningTime="2025-12-13 03:31:10.771421541 +0000 UTC m=+1163.007265097" watchObservedRunningTime="2025-12-13 03:31:10.776082998 +0000 UTC m=+1163.011926544" Dec 13 03:31:10 crc kubenswrapper[5070]: I1213 03:31:10.798545 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=3.7984217190000003 podStartE2EDuration="3.798421719s" podCreationTimestamp="2025-12-13 03:31:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 03:31:10.790963265 +0000 UTC m=+1163.026806821" watchObservedRunningTime="2025-12-13 03:31:10.798421719 +0000 UTC m=+1163.034265265" Dec 13 03:31:13 crc kubenswrapper[5070]: I1213 03:31:13.091874 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Dec 13 03:31:13 crc kubenswrapper[5070]: I1213 03:31:13.777661 5070 generic.go:334] "Generic (PLEG): container finished" podID="d7d71324-fddc-4a3a-ab67-164b8d90b2d3" containerID="9b5413f46bce5d9fb208db75baa625809e81164c0f0a7c77e14239e069537396" exitCode=0 Dec 13 03:31:13 crc kubenswrapper[5070]: I1213 03:31:13.777699 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-64f646c4fb-ktrsv" event={"ID":"d7d71324-fddc-4a3a-ab67-164b8d90b2d3","Type":"ContainerDied","Data":"9b5413f46bce5d9fb208db75baa625809e81164c0f0a7c77e14239e069537396"} Dec 13 03:31:15 crc kubenswrapper[5070]: I1213 03:31:15.507312 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 13 03:31:15 crc kubenswrapper[5070]: I1213 03:31:15.508394 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="40f75e9d-9b19-4652-9aad-bc38e0d0eca4" containerName="ceilometer-central-agent" containerID="cri-o://77442c30b04c2458e8d0c0aecfa4f34a8986aa781474c22daee8d0c77597c0a4" gracePeriod=30 Dec 13 03:31:15 crc kubenswrapper[5070]: I1213 03:31:15.508476 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="40f75e9d-9b19-4652-9aad-bc38e0d0eca4" containerName="sg-core" containerID="cri-o://b9182b73a4a8d5a156c8c4de70f1473d7d7532f8f73fe8c5b585d21077bbeae7" gracePeriod=30 Dec 13 03:31:15 crc kubenswrapper[5070]: I1213 03:31:15.508476 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="40f75e9d-9b19-4652-9aad-bc38e0d0eca4" containerName="ceilometer-notification-agent" containerID="cri-o://0b793b708934ac93206d0055eb3842da54308594aee82debcf3a6a805f972595" gracePeriod=30 Dec 13 03:31:15 crc kubenswrapper[5070]: I1213 03:31:15.508498 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="40f75e9d-9b19-4652-9aad-bc38e0d0eca4" containerName="proxy-httpd" containerID="cri-o://acf0527b801e3dff613a7e2cdcf9353780a38aba856ce953df4ff96f6d089719" gracePeriod=30 Dec 13 03:31:15 crc kubenswrapper[5070]: I1213 03:31:15.805555 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-l8q4l" 
event={"ID":"7d28fde7-7ce2-4aa6-9412-dae4f27cb494","Type":"ContainerStarted","Data":"0cd6fd61fefd84c776775418027b78e5fb60f51884e89d6db56e9c5b3723b879"} Dec 13 03:31:15 crc kubenswrapper[5070]: I1213 03:31:15.820431 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"40f75e9d-9b19-4652-9aad-bc38e0d0eca4","Type":"ContainerDied","Data":"acf0527b801e3dff613a7e2cdcf9353780a38aba856ce953df4ff96f6d089719"} Dec 13 03:31:15 crc kubenswrapper[5070]: I1213 03:31:15.820603 5070 generic.go:334] "Generic (PLEG): container finished" podID="40f75e9d-9b19-4652-9aad-bc38e0d0eca4" containerID="acf0527b801e3dff613a7e2cdcf9353780a38aba856ce953df4ff96f6d089719" exitCode=0 Dec 13 03:31:15 crc kubenswrapper[5070]: I1213 03:31:15.820637 5070 generic.go:334] "Generic (PLEG): container finished" podID="40f75e9d-9b19-4652-9aad-bc38e0d0eca4" containerID="b9182b73a4a8d5a156c8c4de70f1473d7d7532f8f73fe8c5b585d21077bbeae7" exitCode=2 Dec 13 03:31:15 crc kubenswrapper[5070]: I1213 03:31:15.820669 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"40f75e9d-9b19-4652-9aad-bc38e0d0eca4","Type":"ContainerDied","Data":"b9182b73a4a8d5a156c8c4de70f1473d7d7532f8f73fe8c5b585d21077bbeae7"} Dec 13 03:31:15 crc kubenswrapper[5070]: I1213 03:31:15.829964 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-db-sync-l8q4l" podStartSLOduration=2.282863059 podStartE2EDuration="9.829941249s" podCreationTimestamp="2025-12-13 03:31:06 +0000 UTC" firstStartedPulling="2025-12-13 03:31:07.87925818 +0000 UTC m=+1160.115101726" lastFinishedPulling="2025-12-13 03:31:15.42633636 +0000 UTC m=+1167.662179916" observedRunningTime="2025-12-13 03:31:15.826776221 +0000 UTC m=+1168.062619767" watchObservedRunningTime="2025-12-13 03:31:15.829941249 +0000 UTC m=+1168.065784795" Dec 13 03:31:15 crc kubenswrapper[5070]: I1213 03:31:15.840453 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-64f646c4fb-ktrsv" Dec 13 03:31:15 crc kubenswrapper[5070]: I1213 03:31:15.929175 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/d7d71324-fddc-4a3a-ab67-164b8d90b2d3-config\") pod \"d7d71324-fddc-4a3a-ab67-164b8d90b2d3\" (UID: \"d7d71324-fddc-4a3a-ab67-164b8d90b2d3\") " Dec 13 03:31:15 crc kubenswrapper[5070]: I1213 03:31:15.929524 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4wjh5\" (UniqueName: \"kubernetes.io/projected/d7d71324-fddc-4a3a-ab67-164b8d90b2d3-kube-api-access-4wjh5\") pod \"d7d71324-fddc-4a3a-ab67-164b8d90b2d3\" (UID: \"d7d71324-fddc-4a3a-ab67-164b8d90b2d3\") " Dec 13 03:31:15 crc kubenswrapper[5070]: I1213 03:31:15.929583 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/d7d71324-fddc-4a3a-ab67-164b8d90b2d3-httpd-config\") pod \"d7d71324-fddc-4a3a-ab67-164b8d90b2d3\" (UID: \"d7d71324-fddc-4a3a-ab67-164b8d90b2d3\") " Dec 13 03:31:15 crc kubenswrapper[5070]: I1213 03:31:15.929656 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7d71324-fddc-4a3a-ab67-164b8d90b2d3-combined-ca-bundle\") pod \"d7d71324-fddc-4a3a-ab67-164b8d90b2d3\" (UID: \"d7d71324-fddc-4a3a-ab67-164b8d90b2d3\") " Dec 13 03:31:15 crc kubenswrapper[5070]: I1213 03:31:15.929746 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/d7d71324-fddc-4a3a-ab67-164b8d90b2d3-ovndb-tls-certs\") pod \"d7d71324-fddc-4a3a-ab67-164b8d90b2d3\" (UID: \"d7d71324-fddc-4a3a-ab67-164b8d90b2d3\") " Dec 13 03:31:15 crc kubenswrapper[5070]: I1213 03:31:15.934644 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d7d71324-fddc-4a3a-ab67-164b8d90b2d3-kube-api-access-4wjh5" (OuterVolumeSpecName: "kube-api-access-4wjh5") pod "d7d71324-fddc-4a3a-ab67-164b8d90b2d3" (UID: "d7d71324-fddc-4a3a-ab67-164b8d90b2d3"). InnerVolumeSpecName "kube-api-access-4wjh5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:31:15 crc kubenswrapper[5070]: I1213 03:31:15.934883 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d7d71324-fddc-4a3a-ab67-164b8d90b2d3-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "d7d71324-fddc-4a3a-ab67-164b8d90b2d3" (UID: "d7d71324-fddc-4a3a-ab67-164b8d90b2d3"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:31:15 crc kubenswrapper[5070]: I1213 03:31:15.978451 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d7d71324-fddc-4a3a-ab67-164b8d90b2d3-config" (OuterVolumeSpecName: "config") pod "d7d71324-fddc-4a3a-ab67-164b8d90b2d3" (UID: "d7d71324-fddc-4a3a-ab67-164b8d90b2d3"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:31:15 crc kubenswrapper[5070]: I1213 03:31:15.981581 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d7d71324-fddc-4a3a-ab67-164b8d90b2d3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d7d71324-fddc-4a3a-ab67-164b8d90b2d3" (UID: "d7d71324-fddc-4a3a-ab67-164b8d90b2d3"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:31:16 crc kubenswrapper[5070]: I1213 03:31:16.029008 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d7d71324-fddc-4a3a-ab67-164b8d90b2d3-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "d7d71324-fddc-4a3a-ab67-164b8d90b2d3" (UID: "d7d71324-fddc-4a3a-ab67-164b8d90b2d3"). InnerVolumeSpecName "ovndb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:31:16 crc kubenswrapper[5070]: I1213 03:31:16.031640 5070 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7d71324-fddc-4a3a-ab67-164b8d90b2d3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 03:31:16 crc kubenswrapper[5070]: I1213 03:31:16.031725 5070 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/d7d71324-fddc-4a3a-ab67-164b8d90b2d3-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 13 03:31:16 crc kubenswrapper[5070]: I1213 03:31:16.031799 5070 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/d7d71324-fddc-4a3a-ab67-164b8d90b2d3-config\") on node \"crc\" DevicePath \"\"" Dec 13 03:31:16 crc kubenswrapper[5070]: I1213 03:31:16.031856 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4wjh5\" (UniqueName: \"kubernetes.io/projected/d7d71324-fddc-4a3a-ab67-164b8d90b2d3-kube-api-access-4wjh5\") on node \"crc\" DevicePath \"\"" Dec 13 03:31:16 crc kubenswrapper[5070]: I1213 03:31:16.031922 5070 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/d7d71324-fddc-4a3a-ab67-164b8d90b2d3-httpd-config\") on node \"crc\" DevicePath \"\"" Dec 13 03:31:16 crc kubenswrapper[5070]: I1213 03:31:16.189060 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 13 03:31:16 crc kubenswrapper[5070]: I1213 03:31:16.234408 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/40f75e9d-9b19-4652-9aad-bc38e0d0eca4-config-data\") pod \"40f75e9d-9b19-4652-9aad-bc38e0d0eca4\" (UID: \"40f75e9d-9b19-4652-9aad-bc38e0d0eca4\") " Dec 13 03:31:16 crc kubenswrapper[5070]: I1213 03:31:16.234798 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40f75e9d-9b19-4652-9aad-bc38e0d0eca4-combined-ca-bundle\") pod \"40f75e9d-9b19-4652-9aad-bc38e0d0eca4\" (UID: \"40f75e9d-9b19-4652-9aad-bc38e0d0eca4\") " Dec 13 03:31:16 crc kubenswrapper[5070]: I1213 03:31:16.234996 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/40f75e9d-9b19-4652-9aad-bc38e0d0eca4-run-httpd\") pod \"40f75e9d-9b19-4652-9aad-bc38e0d0eca4\" (UID: \"40f75e9d-9b19-4652-9aad-bc38e0d0eca4\") " Dec 13 03:31:16 crc kubenswrapper[5070]: I1213 03:31:16.235207 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ss2x4\" (UniqueName: \"kubernetes.io/projected/40f75e9d-9b19-4652-9aad-bc38e0d0eca4-kube-api-access-ss2x4\") pod \"40f75e9d-9b19-4652-9aad-bc38e0d0eca4\" (UID: \"40f75e9d-9b19-4652-9aad-bc38e0d0eca4\") " Dec 13 03:31:16 crc kubenswrapper[5070]: I1213 03:31:16.235363 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/40f75e9d-9b19-4652-9aad-bc38e0d0eca4-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "40f75e9d-9b19-4652-9aad-bc38e0d0eca4" (UID: "40f75e9d-9b19-4652-9aad-bc38e0d0eca4"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 03:31:16 crc kubenswrapper[5070]: I1213 03:31:16.235678 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/40f75e9d-9b19-4652-9aad-bc38e0d0eca4-log-httpd\") pod \"40f75e9d-9b19-4652-9aad-bc38e0d0eca4\" (UID: \"40f75e9d-9b19-4652-9aad-bc38e0d0eca4\") " Dec 13 03:31:16 crc kubenswrapper[5070]: I1213 03:31:16.235926 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/40f75e9d-9b19-4652-9aad-bc38e0d0eca4-sg-core-conf-yaml\") pod \"40f75e9d-9b19-4652-9aad-bc38e0d0eca4\" (UID: \"40f75e9d-9b19-4652-9aad-bc38e0d0eca4\") " Dec 13 03:31:16 crc kubenswrapper[5070]: I1213 03:31:16.236073 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/40f75e9d-9b19-4652-9aad-bc38e0d0eca4-scripts\") pod \"40f75e9d-9b19-4652-9aad-bc38e0d0eca4\" (UID: \"40f75e9d-9b19-4652-9aad-bc38e0d0eca4\") " Dec 13 03:31:16 crc kubenswrapper[5070]: I1213 03:31:16.236824 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/40f75e9d-9b19-4652-9aad-bc38e0d0eca4-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "40f75e9d-9b19-4652-9aad-bc38e0d0eca4" (UID: "40f75e9d-9b19-4652-9aad-bc38e0d0eca4"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 03:31:16 crc kubenswrapper[5070]: I1213 03:31:16.238162 5070 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/40f75e9d-9b19-4652-9aad-bc38e0d0eca4-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 13 03:31:16 crc kubenswrapper[5070]: I1213 03:31:16.238213 5070 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/40f75e9d-9b19-4652-9aad-bc38e0d0eca4-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 13 03:31:16 crc kubenswrapper[5070]: I1213 03:31:16.238787 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/40f75e9d-9b19-4652-9aad-bc38e0d0eca4-kube-api-access-ss2x4" (OuterVolumeSpecName: "kube-api-access-ss2x4") pod "40f75e9d-9b19-4652-9aad-bc38e0d0eca4" (UID: "40f75e9d-9b19-4652-9aad-bc38e0d0eca4"). InnerVolumeSpecName "kube-api-access-ss2x4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:31:16 crc kubenswrapper[5070]: I1213 03:31:16.239138 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/40f75e9d-9b19-4652-9aad-bc38e0d0eca4-scripts" (OuterVolumeSpecName: "scripts") pod "40f75e9d-9b19-4652-9aad-bc38e0d0eca4" (UID: "40f75e9d-9b19-4652-9aad-bc38e0d0eca4"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:31:16 crc kubenswrapper[5070]: I1213 03:31:16.258112 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/40f75e9d-9b19-4652-9aad-bc38e0d0eca4-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "40f75e9d-9b19-4652-9aad-bc38e0d0eca4" (UID: "40f75e9d-9b19-4652-9aad-bc38e0d0eca4"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:31:16 crc kubenswrapper[5070]: I1213 03:31:16.312920 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/40f75e9d-9b19-4652-9aad-bc38e0d0eca4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "40f75e9d-9b19-4652-9aad-bc38e0d0eca4" (UID: "40f75e9d-9b19-4652-9aad-bc38e0d0eca4"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:31:16 crc kubenswrapper[5070]: I1213 03:31:16.326148 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/40f75e9d-9b19-4652-9aad-bc38e0d0eca4-config-data" (OuterVolumeSpecName: "config-data") pod "40f75e9d-9b19-4652-9aad-bc38e0d0eca4" (UID: "40f75e9d-9b19-4652-9aad-bc38e0d0eca4"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:31:16 crc kubenswrapper[5070]: I1213 03:31:16.339983 5070 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/40f75e9d-9b19-4652-9aad-bc38e0d0eca4-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 13 03:31:16 crc kubenswrapper[5070]: I1213 03:31:16.340011 5070 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/40f75e9d-9b19-4652-9aad-bc38e0d0eca4-scripts\") on node \"crc\" DevicePath \"\"" Dec 13 03:31:16 crc kubenswrapper[5070]: I1213 03:31:16.340022 5070 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/40f75e9d-9b19-4652-9aad-bc38e0d0eca4-config-data\") on node \"crc\" DevicePath \"\"" Dec 13 03:31:16 crc kubenswrapper[5070]: I1213 03:31:16.340032 5070 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40f75e9d-9b19-4652-9aad-bc38e0d0eca4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 03:31:16 crc kubenswrapper[5070]: I1213 03:31:16.340041 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ss2x4\" (UniqueName: \"kubernetes.io/projected/40f75e9d-9b19-4652-9aad-bc38e0d0eca4-kube-api-access-ss2x4\") on node \"crc\" DevicePath \"\"" Dec 13 03:31:16 crc kubenswrapper[5070]: I1213 03:31:16.832216 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-64f646c4fb-ktrsv" event={"ID":"d7d71324-fddc-4a3a-ab67-164b8d90b2d3","Type":"ContainerDied","Data":"7e62ff28075d74ff673f742def0d62d2df3ac933d49687123c191c212a95b1df"} Dec 13 03:31:16 crc kubenswrapper[5070]: I1213 03:31:16.832608 5070 scope.go:117] "RemoveContainer" containerID="a54a2e955030fa01e6711e092c1c18c7b1e5d91787c10c59117de8ab23fd71c1" Dec 13 03:31:16 crc kubenswrapper[5070]: I1213 03:31:16.832748 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-64f646c4fb-ktrsv" Dec 13 03:31:16 crc kubenswrapper[5070]: I1213 03:31:16.840937 5070 generic.go:334] "Generic (PLEG): container finished" podID="40f75e9d-9b19-4652-9aad-bc38e0d0eca4" containerID="0b793b708934ac93206d0055eb3842da54308594aee82debcf3a6a805f972595" exitCode=0 Dec 13 03:31:16 crc kubenswrapper[5070]: I1213 03:31:16.841258 5070 generic.go:334] "Generic (PLEG): container finished" podID="40f75e9d-9b19-4652-9aad-bc38e0d0eca4" containerID="77442c30b04c2458e8d0c0aecfa4f34a8986aa781474c22daee8d0c77597c0a4" exitCode=0 Dec 13 03:31:16 crc kubenswrapper[5070]: I1213 03:31:16.841146 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 13 03:31:16 crc kubenswrapper[5070]: I1213 03:31:16.841178 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"40f75e9d-9b19-4652-9aad-bc38e0d0eca4","Type":"ContainerDied","Data":"0b793b708934ac93206d0055eb3842da54308594aee82debcf3a6a805f972595"} Dec 13 03:31:16 crc kubenswrapper[5070]: I1213 03:31:16.841768 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"40f75e9d-9b19-4652-9aad-bc38e0d0eca4","Type":"ContainerDied","Data":"77442c30b04c2458e8d0c0aecfa4f34a8986aa781474c22daee8d0c77597c0a4"} Dec 13 03:31:16 crc kubenswrapper[5070]: I1213 03:31:16.841788 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"40f75e9d-9b19-4652-9aad-bc38e0d0eca4","Type":"ContainerDied","Data":"4abcc88fe50d758d0b7467b3ad9b9363825fa6b5fb42c739f52d5b7998746391"} Dec 13 03:31:16 crc kubenswrapper[5070]: I1213 03:31:16.870047 5070 scope.go:117] "RemoveContainer" containerID="9b5413f46bce5d9fb208db75baa625809e81164c0f0a7c77e14239e069537396" Dec 13 03:31:16 crc kubenswrapper[5070]: I1213 03:31:16.879608 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-64f646c4fb-ktrsv"] Dec 13 03:31:16 crc kubenswrapper[5070]: I1213 03:31:16.905573 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-64f646c4fb-ktrsv"] Dec 13 03:31:16 crc kubenswrapper[5070]: I1213 03:31:16.909638 5070 scope.go:117] "RemoveContainer" containerID="acf0527b801e3dff613a7e2cdcf9353780a38aba856ce953df4ff96f6d089719" Dec 13 03:31:16 crc kubenswrapper[5070]: I1213 03:31:16.912899 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 13 03:31:16 crc kubenswrapper[5070]: I1213 03:31:16.921674 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 13 03:31:16 crc kubenswrapper[5070]: I1213 03:31:16.933823 5070 scope.go:117] "RemoveContainer" containerID="b9182b73a4a8d5a156c8c4de70f1473d7d7532f8f73fe8c5b585d21077bbeae7" Dec 13 03:31:16 crc kubenswrapper[5070]: I1213 03:31:16.936904 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 13 03:31:16 crc kubenswrapper[5070]: E1213 03:31:16.937278 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="40f75e9d-9b19-4652-9aad-bc38e0d0eca4" containerName="ceilometer-notification-agent" Dec 13 03:31:16 crc kubenswrapper[5070]: I1213 03:31:16.937295 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="40f75e9d-9b19-4652-9aad-bc38e0d0eca4" containerName="ceilometer-notification-agent" Dec 13 03:31:16 crc kubenswrapper[5070]: E1213 03:31:16.937314 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d7d71324-fddc-4a3a-ab67-164b8d90b2d3" containerName="neutron-api" Dec 13 03:31:16 crc kubenswrapper[5070]: I1213 03:31:16.937320 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="d7d71324-fddc-4a3a-ab67-164b8d90b2d3" containerName="neutron-api" Dec 13 03:31:16 crc kubenswrapper[5070]: E1213 03:31:16.937331 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="40f75e9d-9b19-4652-9aad-bc38e0d0eca4" containerName="sg-core" Dec 13 03:31:16 crc kubenswrapper[5070]: I1213 03:31:16.937337 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="40f75e9d-9b19-4652-9aad-bc38e0d0eca4" containerName="sg-core" Dec 13 03:31:16 crc kubenswrapper[5070]: E1213 03:31:16.937358 5070 cpu_manager.go:410] "RemoveStaleState: removing 
container" podUID="40f75e9d-9b19-4652-9aad-bc38e0d0eca4" containerName="proxy-httpd" Dec 13 03:31:16 crc kubenswrapper[5070]: I1213 03:31:16.937363 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="40f75e9d-9b19-4652-9aad-bc38e0d0eca4" containerName="proxy-httpd" Dec 13 03:31:16 crc kubenswrapper[5070]: E1213 03:31:16.937376 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="40f75e9d-9b19-4652-9aad-bc38e0d0eca4" containerName="ceilometer-central-agent" Dec 13 03:31:16 crc kubenswrapper[5070]: I1213 03:31:16.937381 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="40f75e9d-9b19-4652-9aad-bc38e0d0eca4" containerName="ceilometer-central-agent" Dec 13 03:31:16 crc kubenswrapper[5070]: E1213 03:31:16.937390 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d7d71324-fddc-4a3a-ab67-164b8d90b2d3" containerName="neutron-httpd" Dec 13 03:31:16 crc kubenswrapper[5070]: I1213 03:31:16.937396 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="d7d71324-fddc-4a3a-ab67-164b8d90b2d3" containerName="neutron-httpd" Dec 13 03:31:16 crc kubenswrapper[5070]: I1213 03:31:16.937576 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="40f75e9d-9b19-4652-9aad-bc38e0d0eca4" containerName="sg-core" Dec 13 03:31:16 crc kubenswrapper[5070]: I1213 03:31:16.937592 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="40f75e9d-9b19-4652-9aad-bc38e0d0eca4" containerName="ceilometer-notification-agent" Dec 13 03:31:16 crc kubenswrapper[5070]: I1213 03:31:16.937604 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="d7d71324-fddc-4a3a-ab67-164b8d90b2d3" containerName="neutron-httpd" Dec 13 03:31:16 crc kubenswrapper[5070]: I1213 03:31:16.937618 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="40f75e9d-9b19-4652-9aad-bc38e0d0eca4" containerName="ceilometer-central-agent" Dec 13 03:31:16 crc kubenswrapper[5070]: I1213 03:31:16.937629 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="d7d71324-fddc-4a3a-ab67-164b8d90b2d3" containerName="neutron-api" Dec 13 03:31:16 crc kubenswrapper[5070]: I1213 03:31:16.937636 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="40f75e9d-9b19-4652-9aad-bc38e0d0eca4" containerName="proxy-httpd" Dec 13 03:31:16 crc kubenswrapper[5070]: I1213 03:31:16.939253 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 13 03:31:16 crc kubenswrapper[5070]: I1213 03:31:16.941020 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 13 03:31:16 crc kubenswrapper[5070]: I1213 03:31:16.942201 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 13 03:31:16 crc kubenswrapper[5070]: I1213 03:31:16.948965 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 13 03:31:16 crc kubenswrapper[5070]: I1213 03:31:16.971082 5070 scope.go:117] "RemoveContainer" containerID="0b793b708934ac93206d0055eb3842da54308594aee82debcf3a6a805f972595" Dec 13 03:31:16 crc kubenswrapper[5070]: I1213 03:31:16.989862 5070 scope.go:117] "RemoveContainer" containerID="77442c30b04c2458e8d0c0aecfa4f34a8986aa781474c22daee8d0c77597c0a4" Dec 13 03:31:17 crc kubenswrapper[5070]: I1213 03:31:17.011570 5070 scope.go:117] "RemoveContainer" containerID="acf0527b801e3dff613a7e2cdcf9353780a38aba856ce953df4ff96f6d089719" Dec 13 03:31:17 crc kubenswrapper[5070]: E1213 03:31:17.012049 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"acf0527b801e3dff613a7e2cdcf9353780a38aba856ce953df4ff96f6d089719\": container with ID starting with acf0527b801e3dff613a7e2cdcf9353780a38aba856ce953df4ff96f6d089719 not found: ID does not exist" containerID="acf0527b801e3dff613a7e2cdcf9353780a38aba856ce953df4ff96f6d089719" Dec 13 03:31:17 crc kubenswrapper[5070]: I1213 03:31:17.012088 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"acf0527b801e3dff613a7e2cdcf9353780a38aba856ce953df4ff96f6d089719"} err="failed to get container status \"acf0527b801e3dff613a7e2cdcf9353780a38aba856ce953df4ff96f6d089719\": rpc error: code = NotFound desc = could not find container \"acf0527b801e3dff613a7e2cdcf9353780a38aba856ce953df4ff96f6d089719\": container with ID starting with acf0527b801e3dff613a7e2cdcf9353780a38aba856ce953df4ff96f6d089719 not found: ID does not exist" Dec 13 03:31:17 crc kubenswrapper[5070]: I1213 03:31:17.012113 5070 scope.go:117] "RemoveContainer" containerID="b9182b73a4a8d5a156c8c4de70f1473d7d7532f8f73fe8c5b585d21077bbeae7" Dec 13 03:31:17 crc kubenswrapper[5070]: E1213 03:31:17.012415 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b9182b73a4a8d5a156c8c4de70f1473d7d7532f8f73fe8c5b585d21077bbeae7\": container with ID starting with b9182b73a4a8d5a156c8c4de70f1473d7d7532f8f73fe8c5b585d21077bbeae7 not found: ID does not exist" containerID="b9182b73a4a8d5a156c8c4de70f1473d7d7532f8f73fe8c5b585d21077bbeae7" Dec 13 03:31:17 crc kubenswrapper[5070]: I1213 03:31:17.012476 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b9182b73a4a8d5a156c8c4de70f1473d7d7532f8f73fe8c5b585d21077bbeae7"} err="failed to get container status \"b9182b73a4a8d5a156c8c4de70f1473d7d7532f8f73fe8c5b585d21077bbeae7\": rpc error: code = NotFound desc = could not find container \"b9182b73a4a8d5a156c8c4de70f1473d7d7532f8f73fe8c5b585d21077bbeae7\": container with ID starting with b9182b73a4a8d5a156c8c4de70f1473d7d7532f8f73fe8c5b585d21077bbeae7 not found: ID does not exist" Dec 13 03:31:17 crc kubenswrapper[5070]: I1213 03:31:17.012504 5070 scope.go:117] "RemoveContainer" containerID="0b793b708934ac93206d0055eb3842da54308594aee82debcf3a6a805f972595" Dec 13 
03:31:17 crc kubenswrapper[5070]: E1213 03:31:17.013145 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0b793b708934ac93206d0055eb3842da54308594aee82debcf3a6a805f972595\": container with ID starting with 0b793b708934ac93206d0055eb3842da54308594aee82debcf3a6a805f972595 not found: ID does not exist" containerID="0b793b708934ac93206d0055eb3842da54308594aee82debcf3a6a805f972595" Dec 13 03:31:17 crc kubenswrapper[5070]: I1213 03:31:17.013167 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0b793b708934ac93206d0055eb3842da54308594aee82debcf3a6a805f972595"} err="failed to get container status \"0b793b708934ac93206d0055eb3842da54308594aee82debcf3a6a805f972595\": rpc error: code = NotFound desc = could not find container \"0b793b708934ac93206d0055eb3842da54308594aee82debcf3a6a805f972595\": container with ID starting with 0b793b708934ac93206d0055eb3842da54308594aee82debcf3a6a805f972595 not found: ID does not exist" Dec 13 03:31:17 crc kubenswrapper[5070]: I1213 03:31:17.013180 5070 scope.go:117] "RemoveContainer" containerID="77442c30b04c2458e8d0c0aecfa4f34a8986aa781474c22daee8d0c77597c0a4" Dec 13 03:31:17 crc kubenswrapper[5070]: E1213 03:31:17.013582 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"77442c30b04c2458e8d0c0aecfa4f34a8986aa781474c22daee8d0c77597c0a4\": container with ID starting with 77442c30b04c2458e8d0c0aecfa4f34a8986aa781474c22daee8d0c77597c0a4 not found: ID does not exist" containerID="77442c30b04c2458e8d0c0aecfa4f34a8986aa781474c22daee8d0c77597c0a4" Dec 13 03:31:17 crc kubenswrapper[5070]: I1213 03:31:17.013665 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"77442c30b04c2458e8d0c0aecfa4f34a8986aa781474c22daee8d0c77597c0a4"} err="failed to get container status \"77442c30b04c2458e8d0c0aecfa4f34a8986aa781474c22daee8d0c77597c0a4\": rpc error: code = NotFound desc = could not find container \"77442c30b04c2458e8d0c0aecfa4f34a8986aa781474c22daee8d0c77597c0a4\": container with ID starting with 77442c30b04c2458e8d0c0aecfa4f34a8986aa781474c22daee8d0c77597c0a4 not found: ID does not exist" Dec 13 03:31:17 crc kubenswrapper[5070]: I1213 03:31:17.013694 5070 scope.go:117] "RemoveContainer" containerID="acf0527b801e3dff613a7e2cdcf9353780a38aba856ce953df4ff96f6d089719" Dec 13 03:31:17 crc kubenswrapper[5070]: I1213 03:31:17.013929 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"acf0527b801e3dff613a7e2cdcf9353780a38aba856ce953df4ff96f6d089719"} err="failed to get container status \"acf0527b801e3dff613a7e2cdcf9353780a38aba856ce953df4ff96f6d089719\": rpc error: code = NotFound desc = could not find container \"acf0527b801e3dff613a7e2cdcf9353780a38aba856ce953df4ff96f6d089719\": container with ID starting with acf0527b801e3dff613a7e2cdcf9353780a38aba856ce953df4ff96f6d089719 not found: ID does not exist" Dec 13 03:31:17 crc kubenswrapper[5070]: I1213 03:31:17.013942 5070 scope.go:117] "RemoveContainer" containerID="b9182b73a4a8d5a156c8c4de70f1473d7d7532f8f73fe8c5b585d21077bbeae7" Dec 13 03:31:17 crc kubenswrapper[5070]: I1213 03:31:17.014125 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b9182b73a4a8d5a156c8c4de70f1473d7d7532f8f73fe8c5b585d21077bbeae7"} err="failed to get container status 
\"b9182b73a4a8d5a156c8c4de70f1473d7d7532f8f73fe8c5b585d21077bbeae7\": rpc error: code = NotFound desc = could not find container \"b9182b73a4a8d5a156c8c4de70f1473d7d7532f8f73fe8c5b585d21077bbeae7\": container with ID starting with b9182b73a4a8d5a156c8c4de70f1473d7d7532f8f73fe8c5b585d21077bbeae7 not found: ID does not exist" Dec 13 03:31:17 crc kubenswrapper[5070]: I1213 03:31:17.014137 5070 scope.go:117] "RemoveContainer" containerID="0b793b708934ac93206d0055eb3842da54308594aee82debcf3a6a805f972595" Dec 13 03:31:17 crc kubenswrapper[5070]: I1213 03:31:17.014272 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0b793b708934ac93206d0055eb3842da54308594aee82debcf3a6a805f972595"} err="failed to get container status \"0b793b708934ac93206d0055eb3842da54308594aee82debcf3a6a805f972595\": rpc error: code = NotFound desc = could not find container \"0b793b708934ac93206d0055eb3842da54308594aee82debcf3a6a805f972595\": container with ID starting with 0b793b708934ac93206d0055eb3842da54308594aee82debcf3a6a805f972595 not found: ID does not exist" Dec 13 03:31:17 crc kubenswrapper[5070]: I1213 03:31:17.014283 5070 scope.go:117] "RemoveContainer" containerID="77442c30b04c2458e8d0c0aecfa4f34a8986aa781474c22daee8d0c77597c0a4" Dec 13 03:31:17 crc kubenswrapper[5070]: I1213 03:31:17.014420 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"77442c30b04c2458e8d0c0aecfa4f34a8986aa781474c22daee8d0c77597c0a4"} err="failed to get container status \"77442c30b04c2458e8d0c0aecfa4f34a8986aa781474c22daee8d0c77597c0a4\": rpc error: code = NotFound desc = could not find container \"77442c30b04c2458e8d0c0aecfa4f34a8986aa781474c22daee8d0c77597c0a4\": container with ID starting with 77442c30b04c2458e8d0c0aecfa4f34a8986aa781474c22daee8d0c77597c0a4 not found: ID does not exist" Dec 13 03:31:17 crc kubenswrapper[5070]: I1213 03:31:17.056648 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rfbt7\" (UniqueName: \"kubernetes.io/projected/d457f353-5e36-43dc-9f36-c0f55839debb-kube-api-access-rfbt7\") pod \"ceilometer-0\" (UID: \"d457f353-5e36-43dc-9f36-c0f55839debb\") " pod="openstack/ceilometer-0" Dec 13 03:31:17 crc kubenswrapper[5070]: I1213 03:31:17.056771 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d457f353-5e36-43dc-9f36-c0f55839debb-run-httpd\") pod \"ceilometer-0\" (UID: \"d457f353-5e36-43dc-9f36-c0f55839debb\") " pod="openstack/ceilometer-0" Dec 13 03:31:17 crc kubenswrapper[5070]: I1213 03:31:17.056882 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d457f353-5e36-43dc-9f36-c0f55839debb-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"d457f353-5e36-43dc-9f36-c0f55839debb\") " pod="openstack/ceilometer-0" Dec 13 03:31:17 crc kubenswrapper[5070]: I1213 03:31:17.056908 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d457f353-5e36-43dc-9f36-c0f55839debb-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"d457f353-5e36-43dc-9f36-c0f55839debb\") " pod="openstack/ceilometer-0" Dec 13 03:31:17 crc kubenswrapper[5070]: I1213 03:31:17.057082 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"scripts\" (UniqueName: \"kubernetes.io/secret/d457f353-5e36-43dc-9f36-c0f55839debb-scripts\") pod \"ceilometer-0\" (UID: \"d457f353-5e36-43dc-9f36-c0f55839debb\") " pod="openstack/ceilometer-0" Dec 13 03:31:17 crc kubenswrapper[5070]: I1213 03:31:17.057120 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d457f353-5e36-43dc-9f36-c0f55839debb-config-data\") pod \"ceilometer-0\" (UID: \"d457f353-5e36-43dc-9f36-c0f55839debb\") " pod="openstack/ceilometer-0" Dec 13 03:31:17 crc kubenswrapper[5070]: I1213 03:31:17.057270 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d457f353-5e36-43dc-9f36-c0f55839debb-log-httpd\") pod \"ceilometer-0\" (UID: \"d457f353-5e36-43dc-9f36-c0f55839debb\") " pod="openstack/ceilometer-0" Dec 13 03:31:17 crc kubenswrapper[5070]: I1213 03:31:17.158583 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d457f353-5e36-43dc-9f36-c0f55839debb-run-httpd\") pod \"ceilometer-0\" (UID: \"d457f353-5e36-43dc-9f36-c0f55839debb\") " pod="openstack/ceilometer-0" Dec 13 03:31:17 crc kubenswrapper[5070]: I1213 03:31:17.158645 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d457f353-5e36-43dc-9f36-c0f55839debb-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"d457f353-5e36-43dc-9f36-c0f55839debb\") " pod="openstack/ceilometer-0" Dec 13 03:31:17 crc kubenswrapper[5070]: I1213 03:31:17.158662 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d457f353-5e36-43dc-9f36-c0f55839debb-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"d457f353-5e36-43dc-9f36-c0f55839debb\") " pod="openstack/ceilometer-0" Dec 13 03:31:17 crc kubenswrapper[5070]: I1213 03:31:17.158733 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d457f353-5e36-43dc-9f36-c0f55839debb-scripts\") pod \"ceilometer-0\" (UID: \"d457f353-5e36-43dc-9f36-c0f55839debb\") " pod="openstack/ceilometer-0" Dec 13 03:31:17 crc kubenswrapper[5070]: I1213 03:31:17.158753 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d457f353-5e36-43dc-9f36-c0f55839debb-config-data\") pod \"ceilometer-0\" (UID: \"d457f353-5e36-43dc-9f36-c0f55839debb\") " pod="openstack/ceilometer-0" Dec 13 03:31:17 crc kubenswrapper[5070]: I1213 03:31:17.158804 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d457f353-5e36-43dc-9f36-c0f55839debb-log-httpd\") pod \"ceilometer-0\" (UID: \"d457f353-5e36-43dc-9f36-c0f55839debb\") " pod="openstack/ceilometer-0" Dec 13 03:31:17 crc kubenswrapper[5070]: I1213 03:31:17.158836 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rfbt7\" (UniqueName: \"kubernetes.io/projected/d457f353-5e36-43dc-9f36-c0f55839debb-kube-api-access-rfbt7\") pod \"ceilometer-0\" (UID: \"d457f353-5e36-43dc-9f36-c0f55839debb\") " pod="openstack/ceilometer-0" Dec 13 03:31:17 crc kubenswrapper[5070]: I1213 03:31:17.159241 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" 
(UniqueName: \"kubernetes.io/empty-dir/d457f353-5e36-43dc-9f36-c0f55839debb-log-httpd\") pod \"ceilometer-0\" (UID: \"d457f353-5e36-43dc-9f36-c0f55839debb\") " pod="openstack/ceilometer-0" Dec 13 03:31:17 crc kubenswrapper[5070]: I1213 03:31:17.159369 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d457f353-5e36-43dc-9f36-c0f55839debb-run-httpd\") pod \"ceilometer-0\" (UID: \"d457f353-5e36-43dc-9f36-c0f55839debb\") " pod="openstack/ceilometer-0" Dec 13 03:31:17 crc kubenswrapper[5070]: I1213 03:31:17.163862 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d457f353-5e36-43dc-9f36-c0f55839debb-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"d457f353-5e36-43dc-9f36-c0f55839debb\") " pod="openstack/ceilometer-0" Dec 13 03:31:17 crc kubenswrapper[5070]: I1213 03:31:17.165054 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d457f353-5e36-43dc-9f36-c0f55839debb-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"d457f353-5e36-43dc-9f36-c0f55839debb\") " pod="openstack/ceilometer-0" Dec 13 03:31:17 crc kubenswrapper[5070]: I1213 03:31:17.165844 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d457f353-5e36-43dc-9f36-c0f55839debb-config-data\") pod \"ceilometer-0\" (UID: \"d457f353-5e36-43dc-9f36-c0f55839debb\") " pod="openstack/ceilometer-0" Dec 13 03:31:17 crc kubenswrapper[5070]: I1213 03:31:17.186933 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d457f353-5e36-43dc-9f36-c0f55839debb-scripts\") pod \"ceilometer-0\" (UID: \"d457f353-5e36-43dc-9f36-c0f55839debb\") " pod="openstack/ceilometer-0" Dec 13 03:31:17 crc kubenswrapper[5070]: I1213 03:31:17.189804 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rfbt7\" (UniqueName: \"kubernetes.io/projected/d457f353-5e36-43dc-9f36-c0f55839debb-kube-api-access-rfbt7\") pod \"ceilometer-0\" (UID: \"d457f353-5e36-43dc-9f36-c0f55839debb\") " pod="openstack/ceilometer-0" Dec 13 03:31:17 crc kubenswrapper[5070]: I1213 03:31:17.264629 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 13 03:31:17 crc kubenswrapper[5070]: I1213 03:31:17.774904 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 13 03:31:17 crc kubenswrapper[5070]: W1213 03:31:17.784085 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd457f353_5e36_43dc_9f36_c0f55839debb.slice/crio-12614ed54ac4877f96fb912c123abfb7bde6f1c1c103da958815363dbb669d5d WatchSource:0}: Error finding container 12614ed54ac4877f96fb912c123abfb7bde6f1c1c103da958815363dbb669d5d: Status 404 returned error can't find the container with id 12614ed54ac4877f96fb912c123abfb7bde6f1c1c103da958815363dbb669d5d Dec 13 03:31:17 crc kubenswrapper[5070]: I1213 03:31:17.855657 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d457f353-5e36-43dc-9f36-c0f55839debb","Type":"ContainerStarted","Data":"12614ed54ac4877f96fb912c123abfb7bde6f1c1c103da958815363dbb669d5d"} Dec 13 03:31:18 crc kubenswrapper[5070]: I1213 03:31:18.178545 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="40f75e9d-9b19-4652-9aad-bc38e0d0eca4" path="/var/lib/kubelet/pods/40f75e9d-9b19-4652-9aad-bc38e0d0eca4/volumes" Dec 13 03:31:18 crc kubenswrapper[5070]: I1213 03:31:18.179695 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d7d71324-fddc-4a3a-ab67-164b8d90b2d3" path="/var/lib/kubelet/pods/d7d71324-fddc-4a3a-ab67-164b8d90b2d3/volumes" Dec 13 03:31:18 crc kubenswrapper[5070]: I1213 03:31:18.310229 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Dec 13 03:31:18 crc kubenswrapper[5070]: I1213 03:31:18.864908 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d457f353-5e36-43dc-9f36-c0f55839debb","Type":"ContainerStarted","Data":"9f9a8499d936ea71ec5bcd52b48c3975a1447590f157bd6f1e084c78132176e8"} Dec 13 03:31:19 crc kubenswrapper[5070]: I1213 03:31:19.881143 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d457f353-5e36-43dc-9f36-c0f55839debb","Type":"ContainerStarted","Data":"fef2c6a23a422773cbcd2cfeaede2faf9f30dbddefbf5a3c7acd45d20477a152"} Dec 13 03:31:20 crc kubenswrapper[5070]: I1213 03:31:20.898809 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d457f353-5e36-43dc-9f36-c0f55839debb","Type":"ContainerStarted","Data":"2199cf3bcf69b7cd272b3f88f33c6c4ce0c091410b72be806267486e2f07de0f"} Dec 13 03:31:21 crc kubenswrapper[5070]: I1213 03:31:21.910020 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d457f353-5e36-43dc-9f36-c0f55839debb","Type":"ContainerStarted","Data":"d1904df6ad75378a5917e0c5f801f8b9ca8634ef972674d86605364f0c3301a3"} Dec 13 03:31:21 crc kubenswrapper[5070]: I1213 03:31:21.910788 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 13 03:31:21 crc kubenswrapper[5070]: I1213 03:31:21.930088 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.053331195 podStartE2EDuration="5.930069049s" podCreationTimestamp="2025-12-13 03:31:16 +0000 UTC" firstStartedPulling="2025-12-13 03:31:17.787771688 +0000 UTC m=+1170.023615234" lastFinishedPulling="2025-12-13 03:31:21.664509542 +0000 UTC m=+1173.900353088" observedRunningTime="2025-12-13 
03:31:21.928739963 +0000 UTC m=+1174.164583519" watchObservedRunningTime="2025-12-13 03:31:21.930069049 +0000 UTC m=+1174.165912595" Dec 13 03:31:26 crc kubenswrapper[5070]: I1213 03:31:26.955900 5070 generic.go:334] "Generic (PLEG): container finished" podID="7d28fde7-7ce2-4aa6-9412-dae4f27cb494" containerID="0cd6fd61fefd84c776775418027b78e5fb60f51884e89d6db56e9c5b3723b879" exitCode=0 Dec 13 03:31:26 crc kubenswrapper[5070]: I1213 03:31:26.956021 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-l8q4l" event={"ID":"7d28fde7-7ce2-4aa6-9412-dae4f27cb494","Type":"ContainerDied","Data":"0cd6fd61fefd84c776775418027b78e5fb60f51884e89d6db56e9c5b3723b879"} Dec 13 03:31:28 crc kubenswrapper[5070]: I1213 03:31:28.293286 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-l8q4l" Dec 13 03:31:28 crc kubenswrapper[5070]: I1213 03:31:28.370878 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7d28fde7-7ce2-4aa6-9412-dae4f27cb494-config-data\") pod \"7d28fde7-7ce2-4aa6-9412-dae4f27cb494\" (UID: \"7d28fde7-7ce2-4aa6-9412-dae4f27cb494\") " Dec 13 03:31:28 crc kubenswrapper[5070]: I1213 03:31:28.371144 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6qdlz\" (UniqueName: \"kubernetes.io/projected/7d28fde7-7ce2-4aa6-9412-dae4f27cb494-kube-api-access-6qdlz\") pod \"7d28fde7-7ce2-4aa6-9412-dae4f27cb494\" (UID: \"7d28fde7-7ce2-4aa6-9412-dae4f27cb494\") " Dec 13 03:31:28 crc kubenswrapper[5070]: I1213 03:31:28.371219 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7d28fde7-7ce2-4aa6-9412-dae4f27cb494-combined-ca-bundle\") pod \"7d28fde7-7ce2-4aa6-9412-dae4f27cb494\" (UID: \"7d28fde7-7ce2-4aa6-9412-dae4f27cb494\") " Dec 13 03:31:28 crc kubenswrapper[5070]: I1213 03:31:28.371296 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7d28fde7-7ce2-4aa6-9412-dae4f27cb494-scripts\") pod \"7d28fde7-7ce2-4aa6-9412-dae4f27cb494\" (UID: \"7d28fde7-7ce2-4aa6-9412-dae4f27cb494\") " Dec 13 03:31:28 crc kubenswrapper[5070]: I1213 03:31:28.378528 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7d28fde7-7ce2-4aa6-9412-dae4f27cb494-scripts" (OuterVolumeSpecName: "scripts") pod "7d28fde7-7ce2-4aa6-9412-dae4f27cb494" (UID: "7d28fde7-7ce2-4aa6-9412-dae4f27cb494"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:31:28 crc kubenswrapper[5070]: I1213 03:31:28.378525 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7d28fde7-7ce2-4aa6-9412-dae4f27cb494-kube-api-access-6qdlz" (OuterVolumeSpecName: "kube-api-access-6qdlz") pod "7d28fde7-7ce2-4aa6-9412-dae4f27cb494" (UID: "7d28fde7-7ce2-4aa6-9412-dae4f27cb494"). InnerVolumeSpecName "kube-api-access-6qdlz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:31:28 crc kubenswrapper[5070]: I1213 03:31:28.398112 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7d28fde7-7ce2-4aa6-9412-dae4f27cb494-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7d28fde7-7ce2-4aa6-9412-dae4f27cb494" (UID: "7d28fde7-7ce2-4aa6-9412-dae4f27cb494"). 
InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:31:28 crc kubenswrapper[5070]: I1213 03:31:28.401312 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7d28fde7-7ce2-4aa6-9412-dae4f27cb494-config-data" (OuterVolumeSpecName: "config-data") pod "7d28fde7-7ce2-4aa6-9412-dae4f27cb494" (UID: "7d28fde7-7ce2-4aa6-9412-dae4f27cb494"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:31:28 crc kubenswrapper[5070]: I1213 03:31:28.473363 5070 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7d28fde7-7ce2-4aa6-9412-dae4f27cb494-config-data\") on node \"crc\" DevicePath \"\"" Dec 13 03:31:28 crc kubenswrapper[5070]: I1213 03:31:28.473404 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6qdlz\" (UniqueName: \"kubernetes.io/projected/7d28fde7-7ce2-4aa6-9412-dae4f27cb494-kube-api-access-6qdlz\") on node \"crc\" DevicePath \"\"" Dec 13 03:31:28 crc kubenswrapper[5070]: I1213 03:31:28.473415 5070 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7d28fde7-7ce2-4aa6-9412-dae4f27cb494-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 03:31:28 crc kubenswrapper[5070]: I1213 03:31:28.473422 5070 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7d28fde7-7ce2-4aa6-9412-dae4f27cb494-scripts\") on node \"crc\" DevicePath \"\"" Dec 13 03:31:28 crc kubenswrapper[5070]: I1213 03:31:28.977830 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-l8q4l" event={"ID":"7d28fde7-7ce2-4aa6-9412-dae4f27cb494","Type":"ContainerDied","Data":"d8be31cb3032d248e3b88ce50dc86123f67153c51d7a4d0c15dc6415ab3fa043"} Dec 13 03:31:28 crc kubenswrapper[5070]: I1213 03:31:28.977868 5070 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d8be31cb3032d248e3b88ce50dc86123f67153c51d7a4d0c15dc6415ab3fa043" Dec 13 03:31:28 crc kubenswrapper[5070]: I1213 03:31:28.977916 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-l8q4l" Dec 13 03:31:29 crc kubenswrapper[5070]: I1213 03:31:29.081334 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 13 03:31:29 crc kubenswrapper[5070]: E1213 03:31:29.081801 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7d28fde7-7ce2-4aa6-9412-dae4f27cb494" containerName="nova-cell0-conductor-db-sync" Dec 13 03:31:29 crc kubenswrapper[5070]: I1213 03:31:29.081821 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="7d28fde7-7ce2-4aa6-9412-dae4f27cb494" containerName="nova-cell0-conductor-db-sync" Dec 13 03:31:29 crc kubenswrapper[5070]: I1213 03:31:29.082046 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="7d28fde7-7ce2-4aa6-9412-dae4f27cb494" containerName="nova-cell0-conductor-db-sync" Dec 13 03:31:29 crc kubenswrapper[5070]: I1213 03:31:29.082768 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 13 03:31:29 crc kubenswrapper[5070]: I1213 03:31:29.084860 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-c2s4w" Dec 13 03:31:29 crc kubenswrapper[5070]: I1213 03:31:29.088823 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Dec 13 03:31:29 crc kubenswrapper[5070]: I1213 03:31:29.094491 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 13 03:31:29 crc kubenswrapper[5070]: I1213 03:31:29.186071 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ee77d0d3-3a57-4105-8561-f7c73ddb8117-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"ee77d0d3-3a57-4105-8561-f7c73ddb8117\") " pod="openstack/nova-cell0-conductor-0" Dec 13 03:31:29 crc kubenswrapper[5070]: I1213 03:31:29.186502 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee77d0d3-3a57-4105-8561-f7c73ddb8117-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"ee77d0d3-3a57-4105-8561-f7c73ddb8117\") " pod="openstack/nova-cell0-conductor-0" Dec 13 03:31:29 crc kubenswrapper[5070]: I1213 03:31:29.186555 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5rjfn\" (UniqueName: \"kubernetes.io/projected/ee77d0d3-3a57-4105-8561-f7c73ddb8117-kube-api-access-5rjfn\") pod \"nova-cell0-conductor-0\" (UID: \"ee77d0d3-3a57-4105-8561-f7c73ddb8117\") " pod="openstack/nova-cell0-conductor-0" Dec 13 03:31:29 crc kubenswrapper[5070]: I1213 03:31:29.288402 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ee77d0d3-3a57-4105-8561-f7c73ddb8117-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"ee77d0d3-3a57-4105-8561-f7c73ddb8117\") " pod="openstack/nova-cell0-conductor-0" Dec 13 03:31:29 crc kubenswrapper[5070]: I1213 03:31:29.288565 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5rjfn\" (UniqueName: \"kubernetes.io/projected/ee77d0d3-3a57-4105-8561-f7c73ddb8117-kube-api-access-5rjfn\") pod \"nova-cell0-conductor-0\" (UID: \"ee77d0d3-3a57-4105-8561-f7c73ddb8117\") " pod="openstack/nova-cell0-conductor-0" Dec 13 03:31:29 crc kubenswrapper[5070]: I1213 03:31:29.288584 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee77d0d3-3a57-4105-8561-f7c73ddb8117-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"ee77d0d3-3a57-4105-8561-f7c73ddb8117\") " pod="openstack/nova-cell0-conductor-0" Dec 13 03:31:29 crc kubenswrapper[5070]: I1213 03:31:29.293622 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ee77d0d3-3a57-4105-8561-f7c73ddb8117-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"ee77d0d3-3a57-4105-8561-f7c73ddb8117\") " pod="openstack/nova-cell0-conductor-0" Dec 13 03:31:29 crc kubenswrapper[5070]: I1213 03:31:29.302496 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee77d0d3-3a57-4105-8561-f7c73ddb8117-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" 
(UID: \"ee77d0d3-3a57-4105-8561-f7c73ddb8117\") " pod="openstack/nova-cell0-conductor-0" Dec 13 03:31:29 crc kubenswrapper[5070]: I1213 03:31:29.304192 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5rjfn\" (UniqueName: \"kubernetes.io/projected/ee77d0d3-3a57-4105-8561-f7c73ddb8117-kube-api-access-5rjfn\") pod \"nova-cell0-conductor-0\" (UID: \"ee77d0d3-3a57-4105-8561-f7c73ddb8117\") " pod="openstack/nova-cell0-conductor-0" Dec 13 03:31:29 crc kubenswrapper[5070]: I1213 03:31:29.401354 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 13 03:31:29 crc kubenswrapper[5070]: I1213 03:31:29.857359 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 13 03:31:29 crc kubenswrapper[5070]: W1213 03:31:29.858578 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podee77d0d3_3a57_4105_8561_f7c73ddb8117.slice/crio-0a84d7f154c5380f915f64a8fef8e8800bc952f9304dd5fd8b034280b7d90fed WatchSource:0}: Error finding container 0a84d7f154c5380f915f64a8fef8e8800bc952f9304dd5fd8b034280b7d90fed: Status 404 returned error can't find the container with id 0a84d7f154c5380f915f64a8fef8e8800bc952f9304dd5fd8b034280b7d90fed Dec 13 03:31:29 crc kubenswrapper[5070]: I1213 03:31:29.987933 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"ee77d0d3-3a57-4105-8561-f7c73ddb8117","Type":"ContainerStarted","Data":"0a84d7f154c5380f915f64a8fef8e8800bc952f9304dd5fd8b034280b7d90fed"} Dec 13 03:31:33 crc kubenswrapper[5070]: I1213 03:31:33.020804 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"ee77d0d3-3a57-4105-8561-f7c73ddb8117","Type":"ContainerStarted","Data":"49ec2ebbc7e19a29eebfc91ce843435bf6381677964299f30974a9ddb77e378c"} Dec 13 03:31:33 crc kubenswrapper[5070]: I1213 03:31:33.021330 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Dec 13 03:31:33 crc kubenswrapper[5070]: I1213 03:31:33.055156 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=4.055136869 podStartE2EDuration="4.055136869s" podCreationTimestamp="2025-12-13 03:31:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 03:31:33.038502515 +0000 UTC m=+1185.274346121" watchObservedRunningTime="2025-12-13 03:31:33.055136869 +0000 UTC m=+1185.290980435" Dec 13 03:31:39 crc kubenswrapper[5070]: I1213 03:31:39.439271 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0" Dec 13 03:31:40 crc kubenswrapper[5070]: I1213 03:31:40.020477 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-cell-mapping-x5tqc"] Dec 13 03:31:40 crc kubenswrapper[5070]: I1213 03:31:40.021747 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-x5tqc" Dec 13 03:31:40 crc kubenswrapper[5070]: I1213 03:31:40.025564 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-scripts" Dec 13 03:31:40 crc kubenswrapper[5070]: I1213 03:31:40.026536 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-config-data" Dec 13 03:31:40 crc kubenswrapper[5070]: I1213 03:31:40.037587 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-x5tqc"] Dec 13 03:31:40 crc kubenswrapper[5070]: I1213 03:31:40.090093 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/21e9c870-f7e7-4ddf-abbb-56911912f97f-config-data\") pod \"nova-cell0-cell-mapping-x5tqc\" (UID: \"21e9c870-f7e7-4ddf-abbb-56911912f97f\") " pod="openstack/nova-cell0-cell-mapping-x5tqc" Dec 13 03:31:40 crc kubenswrapper[5070]: I1213 03:31:40.090182 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/21e9c870-f7e7-4ddf-abbb-56911912f97f-scripts\") pod \"nova-cell0-cell-mapping-x5tqc\" (UID: \"21e9c870-f7e7-4ddf-abbb-56911912f97f\") " pod="openstack/nova-cell0-cell-mapping-x5tqc" Dec 13 03:31:40 crc kubenswrapper[5070]: I1213 03:31:40.090216 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gth6w\" (UniqueName: \"kubernetes.io/projected/21e9c870-f7e7-4ddf-abbb-56911912f97f-kube-api-access-gth6w\") pod \"nova-cell0-cell-mapping-x5tqc\" (UID: \"21e9c870-f7e7-4ddf-abbb-56911912f97f\") " pod="openstack/nova-cell0-cell-mapping-x5tqc" Dec 13 03:31:40 crc kubenswrapper[5070]: I1213 03:31:40.090262 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21e9c870-f7e7-4ddf-abbb-56911912f97f-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-x5tqc\" (UID: \"21e9c870-f7e7-4ddf-abbb-56911912f97f\") " pod="openstack/nova-cell0-cell-mapping-x5tqc" Dec 13 03:31:40 crc kubenswrapper[5070]: I1213 03:31:40.191601 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/21e9c870-f7e7-4ddf-abbb-56911912f97f-scripts\") pod \"nova-cell0-cell-mapping-x5tqc\" (UID: \"21e9c870-f7e7-4ddf-abbb-56911912f97f\") " pod="openstack/nova-cell0-cell-mapping-x5tqc" Dec 13 03:31:40 crc kubenswrapper[5070]: I1213 03:31:40.191667 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gth6w\" (UniqueName: \"kubernetes.io/projected/21e9c870-f7e7-4ddf-abbb-56911912f97f-kube-api-access-gth6w\") pod \"nova-cell0-cell-mapping-x5tqc\" (UID: \"21e9c870-f7e7-4ddf-abbb-56911912f97f\") " pod="openstack/nova-cell0-cell-mapping-x5tqc" Dec 13 03:31:40 crc kubenswrapper[5070]: I1213 03:31:40.191712 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21e9c870-f7e7-4ddf-abbb-56911912f97f-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-x5tqc\" (UID: \"21e9c870-f7e7-4ddf-abbb-56911912f97f\") " pod="openstack/nova-cell0-cell-mapping-x5tqc" Dec 13 03:31:40 crc kubenswrapper[5070]: I1213 03:31:40.191795 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" 
(UniqueName: \"kubernetes.io/secret/21e9c870-f7e7-4ddf-abbb-56911912f97f-config-data\") pod \"nova-cell0-cell-mapping-x5tqc\" (UID: \"21e9c870-f7e7-4ddf-abbb-56911912f97f\") " pod="openstack/nova-cell0-cell-mapping-x5tqc" Dec 13 03:31:40 crc kubenswrapper[5070]: I1213 03:31:40.197823 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/21e9c870-f7e7-4ddf-abbb-56911912f97f-config-data\") pod \"nova-cell0-cell-mapping-x5tqc\" (UID: \"21e9c870-f7e7-4ddf-abbb-56911912f97f\") " pod="openstack/nova-cell0-cell-mapping-x5tqc" Dec 13 03:31:40 crc kubenswrapper[5070]: I1213 03:31:40.208855 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21e9c870-f7e7-4ddf-abbb-56911912f97f-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-x5tqc\" (UID: \"21e9c870-f7e7-4ddf-abbb-56911912f97f\") " pod="openstack/nova-cell0-cell-mapping-x5tqc" Dec 13 03:31:40 crc kubenswrapper[5070]: I1213 03:31:40.220048 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/21e9c870-f7e7-4ddf-abbb-56911912f97f-scripts\") pod \"nova-cell0-cell-mapping-x5tqc\" (UID: \"21e9c870-f7e7-4ddf-abbb-56911912f97f\") " pod="openstack/nova-cell0-cell-mapping-x5tqc" Dec 13 03:31:40 crc kubenswrapper[5070]: I1213 03:31:40.258231 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gth6w\" (UniqueName: \"kubernetes.io/projected/21e9c870-f7e7-4ddf-abbb-56911912f97f-kube-api-access-gth6w\") pod \"nova-cell0-cell-mapping-x5tqc\" (UID: \"21e9c870-f7e7-4ddf-abbb-56911912f97f\") " pod="openstack/nova-cell0-cell-mapping-x5tqc" Dec 13 03:31:40 crc kubenswrapper[5070]: I1213 03:31:40.271622 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 13 03:31:40 crc kubenswrapper[5070]: I1213 03:31:40.273373 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 13 03:31:40 crc kubenswrapper[5070]: I1213 03:31:40.289434 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 13 03:31:40 crc kubenswrapper[5070]: I1213 03:31:40.313521 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 13 03:31:40 crc kubenswrapper[5070]: I1213 03:31:40.315273 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 13 03:31:40 crc kubenswrapper[5070]: I1213 03:31:40.317240 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Dec 13 03:31:40 crc kubenswrapper[5070]: I1213 03:31:40.335179 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 13 03:31:40 crc kubenswrapper[5070]: I1213 03:31:40.380036 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-x5tqc" Dec 13 03:31:40 crc kubenswrapper[5070]: I1213 03:31:40.403581 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 13 03:31:40 crc kubenswrapper[5070]: I1213 03:31:40.407665 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cc5eda17-9fb5-4dcc-b16a-dddf126fa50e-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"cc5eda17-9fb5-4dcc-b16a-dddf126fa50e\") " pod="openstack/nova-api-0" Dec 13 03:31:40 crc kubenswrapper[5070]: I1213 03:31:40.407735 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7cwcc\" (UniqueName: \"kubernetes.io/projected/cc5eda17-9fb5-4dcc-b16a-dddf126fa50e-kube-api-access-7cwcc\") pod \"nova-api-0\" (UID: \"cc5eda17-9fb5-4dcc-b16a-dddf126fa50e\") " pod="openstack/nova-api-0" Dec 13 03:31:40 crc kubenswrapper[5070]: I1213 03:31:40.407769 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e9c685a9-2e74-4815-b092-35d4d1100ecf-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"e9c685a9-2e74-4815-b092-35d4d1100ecf\") " pod="openstack/nova-cell1-novncproxy-0" Dec 13 03:31:40 crc kubenswrapper[5070]: I1213 03:31:40.407808 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ftj8f\" (UniqueName: \"kubernetes.io/projected/e9c685a9-2e74-4815-b092-35d4d1100ecf-kube-api-access-ftj8f\") pod \"nova-cell1-novncproxy-0\" (UID: \"e9c685a9-2e74-4815-b092-35d4d1100ecf\") " pod="openstack/nova-cell1-novncproxy-0" Dec 13 03:31:40 crc kubenswrapper[5070]: I1213 03:31:40.407830 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cc5eda17-9fb5-4dcc-b16a-dddf126fa50e-config-data\") pod \"nova-api-0\" (UID: \"cc5eda17-9fb5-4dcc-b16a-dddf126fa50e\") " pod="openstack/nova-api-0" Dec 13 03:31:40 crc kubenswrapper[5070]: I1213 03:31:40.408046 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cc5eda17-9fb5-4dcc-b16a-dddf126fa50e-logs\") pod \"nova-api-0\" (UID: \"cc5eda17-9fb5-4dcc-b16a-dddf126fa50e\") " pod="openstack/nova-api-0" Dec 13 03:31:40 crc kubenswrapper[5070]: I1213 03:31:40.408275 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e9c685a9-2e74-4815-b092-35d4d1100ecf-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"e9c685a9-2e74-4815-b092-35d4d1100ecf\") " pod="openstack/nova-cell1-novncproxy-0" Dec 13 03:31:40 crc kubenswrapper[5070]: I1213 03:31:40.486338 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 13 03:31:40 crc kubenswrapper[5070]: I1213 03:31:40.508142 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 13 03:31:40 crc kubenswrapper[5070]: I1213 03:31:40.509722 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cc5eda17-9fb5-4dcc-b16a-dddf126fa50e-logs\") pod \"nova-api-0\" (UID: \"cc5eda17-9fb5-4dcc-b16a-dddf126fa50e\") " pod="openstack/nova-api-0" Dec 13 03:31:40 crc kubenswrapper[5070]: I1213 03:31:40.509781 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e9c685a9-2e74-4815-b092-35d4d1100ecf-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"e9c685a9-2e74-4815-b092-35d4d1100ecf\") " pod="openstack/nova-cell1-novncproxy-0" Dec 13 03:31:40 crc kubenswrapper[5070]: I1213 03:31:40.509826 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cc5eda17-9fb5-4dcc-b16a-dddf126fa50e-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"cc5eda17-9fb5-4dcc-b16a-dddf126fa50e\") " pod="openstack/nova-api-0" Dec 13 03:31:40 crc kubenswrapper[5070]: I1213 03:31:40.509866 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7cwcc\" (UniqueName: \"kubernetes.io/projected/cc5eda17-9fb5-4dcc-b16a-dddf126fa50e-kube-api-access-7cwcc\") pod \"nova-api-0\" (UID: \"cc5eda17-9fb5-4dcc-b16a-dddf126fa50e\") " pod="openstack/nova-api-0" Dec 13 03:31:40 crc kubenswrapper[5070]: I1213 03:31:40.509895 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e9c685a9-2e74-4815-b092-35d4d1100ecf-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"e9c685a9-2e74-4815-b092-35d4d1100ecf\") " pod="openstack/nova-cell1-novncproxy-0" Dec 13 03:31:40 crc kubenswrapper[5070]: I1213 03:31:40.509945 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ftj8f\" (UniqueName: \"kubernetes.io/projected/e9c685a9-2e74-4815-b092-35d4d1100ecf-kube-api-access-ftj8f\") pod \"nova-cell1-novncproxy-0\" (UID: \"e9c685a9-2e74-4815-b092-35d4d1100ecf\") " pod="openstack/nova-cell1-novncproxy-0" Dec 13 03:31:40 crc kubenswrapper[5070]: I1213 03:31:40.511524 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 13 03:31:40 crc kubenswrapper[5070]: I1213 03:31:40.512344 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cc5eda17-9fb5-4dcc-b16a-dddf126fa50e-logs\") pod \"nova-api-0\" (UID: \"cc5eda17-9fb5-4dcc-b16a-dddf126fa50e\") " pod="openstack/nova-api-0" Dec 13 03:31:40 crc kubenswrapper[5070]: I1213 03:31:40.512584 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 13 03:31:40 crc kubenswrapper[5070]: I1213 03:31:40.516846 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cc5eda17-9fb5-4dcc-b16a-dddf126fa50e-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"cc5eda17-9fb5-4dcc-b16a-dddf126fa50e\") " pod="openstack/nova-api-0" Dec 13 03:31:40 crc kubenswrapper[5070]: I1213 03:31:40.510041 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cc5eda17-9fb5-4dcc-b16a-dddf126fa50e-config-data\") pod \"nova-api-0\" (UID: 
\"cc5eda17-9fb5-4dcc-b16a-dddf126fa50e\") " pod="openstack/nova-api-0" Dec 13 03:31:40 crc kubenswrapper[5070]: I1213 03:31:40.520231 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cc5eda17-9fb5-4dcc-b16a-dddf126fa50e-config-data\") pod \"nova-api-0\" (UID: \"cc5eda17-9fb5-4dcc-b16a-dddf126fa50e\") " pod="openstack/nova-api-0" Dec 13 03:31:40 crc kubenswrapper[5070]: I1213 03:31:40.527410 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e9c685a9-2e74-4815-b092-35d4d1100ecf-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"e9c685a9-2e74-4815-b092-35d4d1100ecf\") " pod="openstack/nova-cell1-novncproxy-0" Dec 13 03:31:40 crc kubenswrapper[5070]: I1213 03:31:40.528028 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e9c685a9-2e74-4815-b092-35d4d1100ecf-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"e9c685a9-2e74-4815-b092-35d4d1100ecf\") " pod="openstack/nova-cell1-novncproxy-0" Dec 13 03:31:40 crc kubenswrapper[5070]: I1213 03:31:40.546011 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7cwcc\" (UniqueName: \"kubernetes.io/projected/cc5eda17-9fb5-4dcc-b16a-dddf126fa50e-kube-api-access-7cwcc\") pod \"nova-api-0\" (UID: \"cc5eda17-9fb5-4dcc-b16a-dddf126fa50e\") " pod="openstack/nova-api-0" Dec 13 03:31:40 crc kubenswrapper[5070]: I1213 03:31:40.551864 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ftj8f\" (UniqueName: \"kubernetes.io/projected/e9c685a9-2e74-4815-b092-35d4d1100ecf-kube-api-access-ftj8f\") pod \"nova-cell1-novncproxy-0\" (UID: \"e9c685a9-2e74-4815-b092-35d4d1100ecf\") " pod="openstack/nova-cell1-novncproxy-0" Dec 13 03:31:40 crc kubenswrapper[5070]: I1213 03:31:40.622614 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1d71c72d-4599-48e7-8c31-63f6968aacb2-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"1d71c72d-4599-48e7-8c31-63f6968aacb2\") " pod="openstack/nova-metadata-0" Dec 13 03:31:40 crc kubenswrapper[5070]: I1213 03:31:40.622715 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1d71c72d-4599-48e7-8c31-63f6968aacb2-logs\") pod \"nova-metadata-0\" (UID: \"1d71c72d-4599-48e7-8c31-63f6968aacb2\") " pod="openstack/nova-metadata-0" Dec 13 03:31:40 crc kubenswrapper[5070]: I1213 03:31:40.622920 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1d71c72d-4599-48e7-8c31-63f6968aacb2-config-data\") pod \"nova-metadata-0\" (UID: \"1d71c72d-4599-48e7-8c31-63f6968aacb2\") " pod="openstack/nova-metadata-0" Dec 13 03:31:40 crc kubenswrapper[5070]: I1213 03:31:40.622942 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8mx6z\" (UniqueName: \"kubernetes.io/projected/1d71c72d-4599-48e7-8c31-63f6968aacb2-kube-api-access-8mx6z\") pod \"nova-metadata-0\" (UID: \"1d71c72d-4599-48e7-8c31-63f6968aacb2\") " pod="openstack/nova-metadata-0" Dec 13 03:31:40 crc kubenswrapper[5070]: I1213 03:31:40.623639 5070 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack/nova-scheduler-0"] Dec 13 03:31:40 crc kubenswrapper[5070]: I1213 03:31:40.624675 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 13 03:31:40 crc kubenswrapper[5070]: I1213 03:31:40.640799 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Dec 13 03:31:40 crc kubenswrapper[5070]: I1213 03:31:40.679008 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-8b8cf6657-2j5dx"] Dec 13 03:31:40 crc kubenswrapper[5070]: I1213 03:31:40.681237 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8b8cf6657-2j5dx" Dec 13 03:31:40 crc kubenswrapper[5070]: I1213 03:31:40.693357 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 13 03:31:40 crc kubenswrapper[5070]: I1213 03:31:40.724026 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3747c2a3-8ef8-44dc-af66-057337b617ee-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"3747c2a3-8ef8-44dc-af66-057337b617ee\") " pod="openstack/nova-scheduler-0" Dec 13 03:31:40 crc kubenswrapper[5070]: I1213 03:31:40.724093 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3747c2a3-8ef8-44dc-af66-057337b617ee-config-data\") pod \"nova-scheduler-0\" (UID: \"3747c2a3-8ef8-44dc-af66-057337b617ee\") " pod="openstack/nova-scheduler-0" Dec 13 03:31:40 crc kubenswrapper[5070]: I1213 03:31:40.724142 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jqpcs\" (UniqueName: \"kubernetes.io/projected/30d804de-c917-4ff4-9576-3e8410417e0a-kube-api-access-jqpcs\") pod \"dnsmasq-dns-8b8cf6657-2j5dx\" (UID: \"30d804de-c917-4ff4-9576-3e8410417e0a\") " pod="openstack/dnsmasq-dns-8b8cf6657-2j5dx" Dec 13 03:31:40 crc kubenswrapper[5070]: I1213 03:31:40.724187 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1d71c72d-4599-48e7-8c31-63f6968aacb2-config-data\") pod \"nova-metadata-0\" (UID: \"1d71c72d-4599-48e7-8c31-63f6968aacb2\") " pod="openstack/nova-metadata-0" Dec 13 03:31:40 crc kubenswrapper[5070]: I1213 03:31:40.724212 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8mx6z\" (UniqueName: \"kubernetes.io/projected/1d71c72d-4599-48e7-8c31-63f6968aacb2-kube-api-access-8mx6z\") pod \"nova-metadata-0\" (UID: \"1d71c72d-4599-48e7-8c31-63f6968aacb2\") " pod="openstack/nova-metadata-0" Dec 13 03:31:40 crc kubenswrapper[5070]: I1213 03:31:40.724246 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/30d804de-c917-4ff4-9576-3e8410417e0a-ovsdbserver-nb\") pod \"dnsmasq-dns-8b8cf6657-2j5dx\" (UID: \"30d804de-c917-4ff4-9576-3e8410417e0a\") " pod="openstack/dnsmasq-dns-8b8cf6657-2j5dx" Dec 13 03:31:40 crc kubenswrapper[5070]: I1213 03:31:40.724285 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/30d804de-c917-4ff4-9576-3e8410417e0a-ovsdbserver-sb\") pod \"dnsmasq-dns-8b8cf6657-2j5dx\" (UID: \"30d804de-c917-4ff4-9576-3e8410417e0a\") 
" pod="openstack/dnsmasq-dns-8b8cf6657-2j5dx" Dec 13 03:31:40 crc kubenswrapper[5070]: I1213 03:31:40.724316 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1d71c72d-4599-48e7-8c31-63f6968aacb2-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"1d71c72d-4599-48e7-8c31-63f6968aacb2\") " pod="openstack/nova-metadata-0" Dec 13 03:31:40 crc kubenswrapper[5070]: I1213 03:31:40.724343 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/30d804de-c917-4ff4-9576-3e8410417e0a-dns-svc\") pod \"dnsmasq-dns-8b8cf6657-2j5dx\" (UID: \"30d804de-c917-4ff4-9576-3e8410417e0a\") " pod="openstack/dnsmasq-dns-8b8cf6657-2j5dx" Dec 13 03:31:40 crc kubenswrapper[5070]: I1213 03:31:40.724390 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1d71c72d-4599-48e7-8c31-63f6968aacb2-logs\") pod \"nova-metadata-0\" (UID: \"1d71c72d-4599-48e7-8c31-63f6968aacb2\") " pod="openstack/nova-metadata-0" Dec 13 03:31:40 crc kubenswrapper[5070]: I1213 03:31:40.724410 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tsvtw\" (UniqueName: \"kubernetes.io/projected/3747c2a3-8ef8-44dc-af66-057337b617ee-kube-api-access-tsvtw\") pod \"nova-scheduler-0\" (UID: \"3747c2a3-8ef8-44dc-af66-057337b617ee\") " pod="openstack/nova-scheduler-0" Dec 13 03:31:40 crc kubenswrapper[5070]: I1213 03:31:40.725919 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/30d804de-c917-4ff4-9576-3e8410417e0a-config\") pod \"dnsmasq-dns-8b8cf6657-2j5dx\" (UID: \"30d804de-c917-4ff4-9576-3e8410417e0a\") " pod="openstack/dnsmasq-dns-8b8cf6657-2j5dx" Dec 13 03:31:40 crc kubenswrapper[5070]: I1213 03:31:40.730756 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1d71c72d-4599-48e7-8c31-63f6968aacb2-logs\") pod \"nova-metadata-0\" (UID: \"1d71c72d-4599-48e7-8c31-63f6968aacb2\") " pod="openstack/nova-metadata-0" Dec 13 03:31:40 crc kubenswrapper[5070]: I1213 03:31:40.731057 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1d71c72d-4599-48e7-8c31-63f6968aacb2-config-data\") pod \"nova-metadata-0\" (UID: \"1d71c72d-4599-48e7-8c31-63f6968aacb2\") " pod="openstack/nova-metadata-0" Dec 13 03:31:40 crc kubenswrapper[5070]: I1213 03:31:40.731398 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 13 03:31:40 crc kubenswrapper[5070]: I1213 03:31:40.733324 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1d71c72d-4599-48e7-8c31-63f6968aacb2-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"1d71c72d-4599-48e7-8c31-63f6968aacb2\") " pod="openstack/nova-metadata-0" Dec 13 03:31:40 crc kubenswrapper[5070]: I1213 03:31:40.740327 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8b8cf6657-2j5dx"] Dec 13 03:31:40 crc kubenswrapper[5070]: I1213 03:31:40.766918 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8mx6z\" (UniqueName: \"kubernetes.io/projected/1d71c72d-4599-48e7-8c31-63f6968aacb2-kube-api-access-8mx6z\") pod \"nova-metadata-0\" (UID: \"1d71c72d-4599-48e7-8c31-63f6968aacb2\") " pod="openstack/nova-metadata-0" Dec 13 03:31:40 crc kubenswrapper[5070]: I1213 03:31:40.829813 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3747c2a3-8ef8-44dc-af66-057337b617ee-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"3747c2a3-8ef8-44dc-af66-057337b617ee\") " pod="openstack/nova-scheduler-0" Dec 13 03:31:40 crc kubenswrapper[5070]: I1213 03:31:40.829877 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3747c2a3-8ef8-44dc-af66-057337b617ee-config-data\") pod \"nova-scheduler-0\" (UID: \"3747c2a3-8ef8-44dc-af66-057337b617ee\") " pod="openstack/nova-scheduler-0" Dec 13 03:31:40 crc kubenswrapper[5070]: I1213 03:31:40.829914 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jqpcs\" (UniqueName: \"kubernetes.io/projected/30d804de-c917-4ff4-9576-3e8410417e0a-kube-api-access-jqpcs\") pod \"dnsmasq-dns-8b8cf6657-2j5dx\" (UID: \"30d804de-c917-4ff4-9576-3e8410417e0a\") " pod="openstack/dnsmasq-dns-8b8cf6657-2j5dx" Dec 13 03:31:40 crc kubenswrapper[5070]: I1213 03:31:40.829969 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/30d804de-c917-4ff4-9576-3e8410417e0a-ovsdbserver-nb\") pod \"dnsmasq-dns-8b8cf6657-2j5dx\" (UID: \"30d804de-c917-4ff4-9576-3e8410417e0a\") " pod="openstack/dnsmasq-dns-8b8cf6657-2j5dx" Dec 13 03:31:40 crc kubenswrapper[5070]: I1213 03:31:40.830010 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/30d804de-c917-4ff4-9576-3e8410417e0a-ovsdbserver-sb\") pod \"dnsmasq-dns-8b8cf6657-2j5dx\" (UID: \"30d804de-c917-4ff4-9576-3e8410417e0a\") " pod="openstack/dnsmasq-dns-8b8cf6657-2j5dx" Dec 13 03:31:40 crc kubenswrapper[5070]: I1213 03:31:40.830042 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/30d804de-c917-4ff4-9576-3e8410417e0a-dns-svc\") pod \"dnsmasq-dns-8b8cf6657-2j5dx\" (UID: \"30d804de-c917-4ff4-9576-3e8410417e0a\") " pod="openstack/dnsmasq-dns-8b8cf6657-2j5dx" Dec 13 03:31:40 crc kubenswrapper[5070]: I1213 03:31:40.830090 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tsvtw\" (UniqueName: \"kubernetes.io/projected/3747c2a3-8ef8-44dc-af66-057337b617ee-kube-api-access-tsvtw\") pod \"nova-scheduler-0\" (UID: 
\"3747c2a3-8ef8-44dc-af66-057337b617ee\") " pod="openstack/nova-scheduler-0" Dec 13 03:31:40 crc kubenswrapper[5070]: I1213 03:31:40.830119 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/30d804de-c917-4ff4-9576-3e8410417e0a-config\") pod \"dnsmasq-dns-8b8cf6657-2j5dx\" (UID: \"30d804de-c917-4ff4-9576-3e8410417e0a\") " pod="openstack/dnsmasq-dns-8b8cf6657-2j5dx" Dec 13 03:31:40 crc kubenswrapper[5070]: I1213 03:31:40.831653 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/30d804de-c917-4ff4-9576-3e8410417e0a-config\") pod \"dnsmasq-dns-8b8cf6657-2j5dx\" (UID: \"30d804de-c917-4ff4-9576-3e8410417e0a\") " pod="openstack/dnsmasq-dns-8b8cf6657-2j5dx" Dec 13 03:31:40 crc kubenswrapper[5070]: I1213 03:31:40.833781 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/30d804de-c917-4ff4-9576-3e8410417e0a-ovsdbserver-nb\") pod \"dnsmasq-dns-8b8cf6657-2j5dx\" (UID: \"30d804de-c917-4ff4-9576-3e8410417e0a\") " pod="openstack/dnsmasq-dns-8b8cf6657-2j5dx" Dec 13 03:31:40 crc kubenswrapper[5070]: I1213 03:31:40.835067 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/30d804de-c917-4ff4-9576-3e8410417e0a-dns-svc\") pod \"dnsmasq-dns-8b8cf6657-2j5dx\" (UID: \"30d804de-c917-4ff4-9576-3e8410417e0a\") " pod="openstack/dnsmasq-dns-8b8cf6657-2j5dx" Dec 13 03:31:40 crc kubenswrapper[5070]: I1213 03:31:40.835746 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/30d804de-c917-4ff4-9576-3e8410417e0a-ovsdbserver-sb\") pod \"dnsmasq-dns-8b8cf6657-2j5dx\" (UID: \"30d804de-c917-4ff4-9576-3e8410417e0a\") " pod="openstack/dnsmasq-dns-8b8cf6657-2j5dx" Dec 13 03:31:40 crc kubenswrapper[5070]: I1213 03:31:40.840270 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 13 03:31:40 crc kubenswrapper[5070]: I1213 03:31:40.845741 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3747c2a3-8ef8-44dc-af66-057337b617ee-config-data\") pod \"nova-scheduler-0\" (UID: \"3747c2a3-8ef8-44dc-af66-057337b617ee\") " pod="openstack/nova-scheduler-0" Dec 13 03:31:40 crc kubenswrapper[5070]: I1213 03:31:40.847067 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3747c2a3-8ef8-44dc-af66-057337b617ee-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"3747c2a3-8ef8-44dc-af66-057337b617ee\") " pod="openstack/nova-scheduler-0" Dec 13 03:31:40 crc kubenswrapper[5070]: I1213 03:31:40.859507 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jqpcs\" (UniqueName: \"kubernetes.io/projected/30d804de-c917-4ff4-9576-3e8410417e0a-kube-api-access-jqpcs\") pod \"dnsmasq-dns-8b8cf6657-2j5dx\" (UID: \"30d804de-c917-4ff4-9576-3e8410417e0a\") " pod="openstack/dnsmasq-dns-8b8cf6657-2j5dx" Dec 13 03:31:40 crc kubenswrapper[5070]: I1213 03:31:40.860791 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 13 03:31:40 crc kubenswrapper[5070]: I1213 03:31:40.866472 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tsvtw\" (UniqueName: \"kubernetes.io/projected/3747c2a3-8ef8-44dc-af66-057337b617ee-kube-api-access-tsvtw\") pod \"nova-scheduler-0\" (UID: \"3747c2a3-8ef8-44dc-af66-057337b617ee\") " pod="openstack/nova-scheduler-0" Dec 13 03:31:40 crc kubenswrapper[5070]: I1213 03:31:40.973363 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 13 03:31:41 crc kubenswrapper[5070]: I1213 03:31:41.017736 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8b8cf6657-2j5dx" Dec 13 03:31:41 crc kubenswrapper[5070]: I1213 03:31:41.208779 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-x5tqc"] Dec 13 03:31:41 crc kubenswrapper[5070]: W1213 03:31:41.225703 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod21e9c870_f7e7_4ddf_abbb_56911912f97f.slice/crio-288d0bb86a9754544635759486e711a81761b3625a8c773848bf87944a06987e WatchSource:0}: Error finding container 288d0bb86a9754544635759486e711a81761b3625a8c773848bf87944a06987e: Status 404 returned error can't find the container with id 288d0bb86a9754544635759486e711a81761b3625a8c773848bf87944a06987e Dec 13 03:31:41 crc kubenswrapper[5070]: I1213 03:31:41.457497 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-db-sync-6ltlj"] Dec 13 03:31:41 crc kubenswrapper[5070]: I1213 03:31:41.458897 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-6ltlj" Dec 13 03:31:41 crc kubenswrapper[5070]: I1213 03:31:41.460711 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Dec 13 03:31:41 crc kubenswrapper[5070]: I1213 03:31:41.460979 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-scripts" Dec 13 03:31:41 crc kubenswrapper[5070]: I1213 03:31:41.475360 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 13 03:31:41 crc kubenswrapper[5070]: I1213 03:31:41.506423 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-6ltlj"] Dec 13 03:31:41 crc kubenswrapper[5070]: I1213 03:31:41.535467 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 13 03:31:41 crc kubenswrapper[5070]: I1213 03:31:41.555430 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/19a2a29e-7784-4e4f-99bc-dad38cff1b50-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-6ltlj\" (UID: \"19a2a29e-7784-4e4f-99bc-dad38cff1b50\") " pod="openstack/nova-cell1-conductor-db-sync-6ltlj" Dec 13 03:31:41 crc kubenswrapper[5070]: I1213 03:31:41.555502 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/19a2a29e-7784-4e4f-99bc-dad38cff1b50-scripts\") pod \"nova-cell1-conductor-db-sync-6ltlj\" (UID: \"19a2a29e-7784-4e4f-99bc-dad38cff1b50\") " pod="openstack/nova-cell1-conductor-db-sync-6ltlj" Dec 13 03:31:41 crc kubenswrapper[5070]: I1213 
03:31:41.555612 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/19a2a29e-7784-4e4f-99bc-dad38cff1b50-config-data\") pod \"nova-cell1-conductor-db-sync-6ltlj\" (UID: \"19a2a29e-7784-4e4f-99bc-dad38cff1b50\") " pod="openstack/nova-cell1-conductor-db-sync-6ltlj" Dec 13 03:31:41 crc kubenswrapper[5070]: I1213 03:31:41.555654 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bkwm6\" (UniqueName: \"kubernetes.io/projected/19a2a29e-7784-4e4f-99bc-dad38cff1b50-kube-api-access-bkwm6\") pod \"nova-cell1-conductor-db-sync-6ltlj\" (UID: \"19a2a29e-7784-4e4f-99bc-dad38cff1b50\") " pod="openstack/nova-cell1-conductor-db-sync-6ltlj" Dec 13 03:31:41 crc kubenswrapper[5070]: I1213 03:31:41.591249 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 13 03:31:41 crc kubenswrapper[5070]: W1213 03:31:41.601322 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1d71c72d_4599_48e7_8c31_63f6968aacb2.slice/crio-a86ed279fdad21653609fc09f712450947d9082e3611c9069cd21b6241a1fa88 WatchSource:0}: Error finding container a86ed279fdad21653609fc09f712450947d9082e3611c9069cd21b6241a1fa88: Status 404 returned error can't find the container with id a86ed279fdad21653609fc09f712450947d9082e3611c9069cd21b6241a1fa88 Dec 13 03:31:41 crc kubenswrapper[5070]: I1213 03:31:41.629680 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8b8cf6657-2j5dx"] Dec 13 03:31:41 crc kubenswrapper[5070]: W1213 03:31:41.632677 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod30d804de_c917_4ff4_9576_3e8410417e0a.slice/crio-dac413f072b2ba18cd5ed24163668704a74b83e4b3578208c8a0b26544f63710 WatchSource:0}: Error finding container dac413f072b2ba18cd5ed24163668704a74b83e4b3578208c8a0b26544f63710: Status 404 returned error can't find the container with id dac413f072b2ba18cd5ed24163668704a74b83e4b3578208c8a0b26544f63710 Dec 13 03:31:41 crc kubenswrapper[5070]: I1213 03:31:41.657113 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/19a2a29e-7784-4e4f-99bc-dad38cff1b50-scripts\") pod \"nova-cell1-conductor-db-sync-6ltlj\" (UID: \"19a2a29e-7784-4e4f-99bc-dad38cff1b50\") " pod="openstack/nova-cell1-conductor-db-sync-6ltlj" Dec 13 03:31:41 crc kubenswrapper[5070]: I1213 03:31:41.657251 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/19a2a29e-7784-4e4f-99bc-dad38cff1b50-config-data\") pod \"nova-cell1-conductor-db-sync-6ltlj\" (UID: \"19a2a29e-7784-4e4f-99bc-dad38cff1b50\") " pod="openstack/nova-cell1-conductor-db-sync-6ltlj" Dec 13 03:31:41 crc kubenswrapper[5070]: I1213 03:31:41.657306 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bkwm6\" (UniqueName: \"kubernetes.io/projected/19a2a29e-7784-4e4f-99bc-dad38cff1b50-kube-api-access-bkwm6\") pod \"nova-cell1-conductor-db-sync-6ltlj\" (UID: \"19a2a29e-7784-4e4f-99bc-dad38cff1b50\") " pod="openstack/nova-cell1-conductor-db-sync-6ltlj" Dec 13 03:31:41 crc kubenswrapper[5070]: I1213 03:31:41.657330 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" 
(UniqueName: \"kubernetes.io/secret/19a2a29e-7784-4e4f-99bc-dad38cff1b50-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-6ltlj\" (UID: \"19a2a29e-7784-4e4f-99bc-dad38cff1b50\") " pod="openstack/nova-cell1-conductor-db-sync-6ltlj" Dec 13 03:31:41 crc kubenswrapper[5070]: I1213 03:31:41.660615 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/19a2a29e-7784-4e4f-99bc-dad38cff1b50-scripts\") pod \"nova-cell1-conductor-db-sync-6ltlj\" (UID: \"19a2a29e-7784-4e4f-99bc-dad38cff1b50\") " pod="openstack/nova-cell1-conductor-db-sync-6ltlj" Dec 13 03:31:41 crc kubenswrapper[5070]: I1213 03:31:41.669631 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/19a2a29e-7784-4e4f-99bc-dad38cff1b50-config-data\") pod \"nova-cell1-conductor-db-sync-6ltlj\" (UID: \"19a2a29e-7784-4e4f-99bc-dad38cff1b50\") " pod="openstack/nova-cell1-conductor-db-sync-6ltlj" Dec 13 03:31:41 crc kubenswrapper[5070]: I1213 03:31:41.671399 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/19a2a29e-7784-4e4f-99bc-dad38cff1b50-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-6ltlj\" (UID: \"19a2a29e-7784-4e4f-99bc-dad38cff1b50\") " pod="openstack/nova-cell1-conductor-db-sync-6ltlj" Dec 13 03:31:41 crc kubenswrapper[5070]: I1213 03:31:41.680851 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bkwm6\" (UniqueName: \"kubernetes.io/projected/19a2a29e-7784-4e4f-99bc-dad38cff1b50-kube-api-access-bkwm6\") pod \"nova-cell1-conductor-db-sync-6ltlj\" (UID: \"19a2a29e-7784-4e4f-99bc-dad38cff1b50\") " pod="openstack/nova-cell1-conductor-db-sync-6ltlj" Dec 13 03:31:41 crc kubenswrapper[5070]: I1213 03:31:41.769680 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 13 03:31:41 crc kubenswrapper[5070]: W1213 03:31:41.773376 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3747c2a3_8ef8_44dc_af66_057337b617ee.slice/crio-1d255ffb622799e84593756418c470fd6fb730213567f5ebeea5b603c07463da WatchSource:0}: Error finding container 1d255ffb622799e84593756418c470fd6fb730213567f5ebeea5b603c07463da: Status 404 returned error can't find the container with id 1d255ffb622799e84593756418c470fd6fb730213567f5ebeea5b603c07463da Dec 13 03:31:41 crc kubenswrapper[5070]: I1213 03:31:41.885366 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-6ltlj" Dec 13 03:31:42 crc kubenswrapper[5070]: I1213 03:31:42.108987 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"1d71c72d-4599-48e7-8c31-63f6968aacb2","Type":"ContainerStarted","Data":"a86ed279fdad21653609fc09f712450947d9082e3611c9069cd21b6241a1fa88"} Dec 13 03:31:42 crc kubenswrapper[5070]: I1213 03:31:42.111232 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-x5tqc" event={"ID":"21e9c870-f7e7-4ddf-abbb-56911912f97f","Type":"ContainerStarted","Data":"d6d64e5d266d635a927f5bef7b322688f4a55e133f22f490d3dd1976ed4c5735"} Dec 13 03:31:42 crc kubenswrapper[5070]: I1213 03:31:42.111292 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-x5tqc" event={"ID":"21e9c870-f7e7-4ddf-abbb-56911912f97f","Type":"ContainerStarted","Data":"288d0bb86a9754544635759486e711a81761b3625a8c773848bf87944a06987e"} Dec 13 03:31:42 crc kubenswrapper[5070]: I1213 03:31:42.114720 5070 generic.go:334] "Generic (PLEG): container finished" podID="30d804de-c917-4ff4-9576-3e8410417e0a" containerID="3a4e8fa2029ef52b4a2d3221a62fa9df189ccbfa51bae5dab00b3fe3c38a78af" exitCode=0 Dec 13 03:31:42 crc kubenswrapper[5070]: I1213 03:31:42.114754 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8b8cf6657-2j5dx" event={"ID":"30d804de-c917-4ff4-9576-3e8410417e0a","Type":"ContainerDied","Data":"3a4e8fa2029ef52b4a2d3221a62fa9df189ccbfa51bae5dab00b3fe3c38a78af"} Dec 13 03:31:42 crc kubenswrapper[5070]: I1213 03:31:42.114791 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8b8cf6657-2j5dx" event={"ID":"30d804de-c917-4ff4-9576-3e8410417e0a","Type":"ContainerStarted","Data":"dac413f072b2ba18cd5ed24163668704a74b83e4b3578208c8a0b26544f63710"} Dec 13 03:31:42 crc kubenswrapper[5070]: I1213 03:31:42.120666 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"3747c2a3-8ef8-44dc-af66-057337b617ee","Type":"ContainerStarted","Data":"1d255ffb622799e84593756418c470fd6fb730213567f5ebeea5b603c07463da"} Dec 13 03:31:42 crc kubenswrapper[5070]: I1213 03:31:42.121742 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"e9c685a9-2e74-4815-b092-35d4d1100ecf","Type":"ContainerStarted","Data":"3165380bf8144020aa2d9b08dd971db068827e90592c907143b8b4714a3912a0"} Dec 13 03:31:42 crc kubenswrapper[5070]: I1213 03:31:42.125472 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"cc5eda17-9fb5-4dcc-b16a-dddf126fa50e","Type":"ContainerStarted","Data":"15109f439df4098b78716053f2ce159e2218f7c6ed43aaf6f13001f1376cff0f"} Dec 13 03:31:42 crc kubenswrapper[5070]: I1213 03:31:42.134796 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-cell-mapping-x5tqc" podStartSLOduration=2.134779169 podStartE2EDuration="2.134779169s" podCreationTimestamp="2025-12-13 03:31:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 03:31:42.125671849 +0000 UTC m=+1194.361515395" watchObservedRunningTime="2025-12-13 03:31:42.134779169 +0000 UTC m=+1194.370622715" Dec 13 03:31:42 crc kubenswrapper[5070]: I1213 03:31:42.381643 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-6ltlj"] Dec 13 
03:31:43 crc kubenswrapper[5070]: I1213 03:31:43.138627 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-6ltlj" event={"ID":"19a2a29e-7784-4e4f-99bc-dad38cff1b50","Type":"ContainerStarted","Data":"3e361442e6367decca6e454c4bfd5e4caa44d7ea8f7e3176ff123cb097250947"} Dec 13 03:31:43 crc kubenswrapper[5070]: I1213 03:31:43.138935 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-6ltlj" event={"ID":"19a2a29e-7784-4e4f-99bc-dad38cff1b50","Type":"ContainerStarted","Data":"1e106d376292fcac22c6c9b84f3b0bae05d8e92db7864b24bd51746d723ddfe8"} Dec 13 03:31:43 crc kubenswrapper[5070]: I1213 03:31:43.144140 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8b8cf6657-2j5dx" event={"ID":"30d804de-c917-4ff4-9576-3e8410417e0a","Type":"ContainerStarted","Data":"a86dc0f781b3633ecbf38484b46a41a5fc0d82dda45217b6bbd310ba3511ceaf"} Dec 13 03:31:43 crc kubenswrapper[5070]: I1213 03:31:43.174106 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-db-sync-6ltlj" podStartSLOduration=2.174089319 podStartE2EDuration="2.174089319s" podCreationTimestamp="2025-12-13 03:31:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 03:31:43.160640071 +0000 UTC m=+1195.396483627" watchObservedRunningTime="2025-12-13 03:31:43.174089319 +0000 UTC m=+1195.409932865" Dec 13 03:31:43 crc kubenswrapper[5070]: I1213 03:31:43.188053 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-8b8cf6657-2j5dx" podStartSLOduration=3.18803268 podStartE2EDuration="3.18803268s" podCreationTimestamp="2025-12-13 03:31:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 03:31:43.179473145 +0000 UTC m=+1195.415316691" watchObservedRunningTime="2025-12-13 03:31:43.18803268 +0000 UTC m=+1195.423876226" Dec 13 03:31:44 crc kubenswrapper[5070]: I1213 03:31:44.159351 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-8b8cf6657-2j5dx" Dec 13 03:31:45 crc kubenswrapper[5070]: I1213 03:31:45.008504 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 13 03:31:45 crc kubenswrapper[5070]: I1213 03:31:45.019458 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 13 03:31:47 crc kubenswrapper[5070]: I1213 03:31:47.204363 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"cc5eda17-9fb5-4dcc-b16a-dddf126fa50e","Type":"ContainerStarted","Data":"29f3eaa678fca2eb931cae0d0923b2b3741c31e6a94ba558321ac9f40c2552a3"} Dec 13 03:31:47 crc kubenswrapper[5070]: I1213 03:31:47.204919 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"cc5eda17-9fb5-4dcc-b16a-dddf126fa50e","Type":"ContainerStarted","Data":"bd04c5ca235e71117f52cda60650195408a28b68b79f351225379c96d700b748"} Dec 13 03:31:47 crc kubenswrapper[5070]: I1213 03:31:47.207265 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"1d71c72d-4599-48e7-8c31-63f6968aacb2","Type":"ContainerStarted","Data":"f8290b2017126ae714321fdf64b5a9517e31727f96389a5329dc4257a592cecc"} Dec 13 03:31:47 crc kubenswrapper[5070]: I1213 03:31:47.207295 5070 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"1d71c72d-4599-48e7-8c31-63f6968aacb2","Type":"ContainerStarted","Data":"ab93fed71144f44516b1b80d4eed9867e83ef2434df55848683d378c9d403fab"} Dec 13 03:31:47 crc kubenswrapper[5070]: I1213 03:31:47.207405 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="1d71c72d-4599-48e7-8c31-63f6968aacb2" containerName="nova-metadata-log" containerID="cri-o://ab93fed71144f44516b1b80d4eed9867e83ef2434df55848683d378c9d403fab" gracePeriod=30 Dec 13 03:31:47 crc kubenswrapper[5070]: I1213 03:31:47.207727 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="1d71c72d-4599-48e7-8c31-63f6968aacb2" containerName="nova-metadata-metadata" containerID="cri-o://f8290b2017126ae714321fdf64b5a9517e31727f96389a5329dc4257a592cecc" gracePeriod=30 Dec 13 03:31:47 crc kubenswrapper[5070]: I1213 03:31:47.211565 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"3747c2a3-8ef8-44dc-af66-057337b617ee","Type":"ContainerStarted","Data":"a34216d2a3de0f053012fcb25a16605bc44a33b676e868542e96a41e93ec9a57"} Dec 13 03:31:47 crc kubenswrapper[5070]: I1213 03:31:47.213688 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"e9c685a9-2e74-4815-b092-35d4d1100ecf","Type":"ContainerStarted","Data":"1e4383444ee8d4adcf23f2dc8d02643dda91e192cb924b619ac300649d426065"} Dec 13 03:31:47 crc kubenswrapper[5070]: I1213 03:31:47.213784 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="e9c685a9-2e74-4815-b092-35d4d1100ecf" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://1e4383444ee8d4adcf23f2dc8d02643dda91e192cb924b619ac300649d426065" gracePeriod=30 Dec 13 03:31:47 crc kubenswrapper[5070]: I1213 03:31:47.230673 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.8865668319999997 podStartE2EDuration="7.230647927s" podCreationTimestamp="2025-12-13 03:31:40 +0000 UTC" firstStartedPulling="2025-12-13 03:31:41.526235159 +0000 UTC m=+1193.762078705" lastFinishedPulling="2025-12-13 03:31:45.870316254 +0000 UTC m=+1198.106159800" observedRunningTime="2025-12-13 03:31:47.225350142 +0000 UTC m=+1199.461193728" watchObservedRunningTime="2025-12-13 03:31:47.230647927 +0000 UTC m=+1199.466491483" Dec 13 03:31:47 crc kubenswrapper[5070]: I1213 03:31:47.253301 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.94876331 podStartE2EDuration="7.253280715s" podCreationTimestamp="2025-12-13 03:31:40 +0000 UTC" firstStartedPulling="2025-12-13 03:31:41.565587784 +0000 UTC m=+1193.801431330" lastFinishedPulling="2025-12-13 03:31:45.870105199 +0000 UTC m=+1198.105948735" observedRunningTime="2025-12-13 03:31:47.24653581 +0000 UTC m=+1199.482379366" watchObservedRunningTime="2025-12-13 03:31:47.253280715 +0000 UTC m=+1199.489124271" Dec 13 03:31:47 crc kubenswrapper[5070]: I1213 03:31:47.271036 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Dec 13 03:31:47 crc kubenswrapper[5070]: I1213 03:31:47.283944 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=3.034194125 
podStartE2EDuration="7.282464862s" podCreationTimestamp="2025-12-13 03:31:40 +0000 UTC" firstStartedPulling="2025-12-13 03:31:41.623484677 +0000 UTC m=+1193.859328223" lastFinishedPulling="2025-12-13 03:31:45.871755414 +0000 UTC m=+1198.107598960" observedRunningTime="2025-12-13 03:31:47.27762906 +0000 UTC m=+1199.513472626" watchObservedRunningTime="2025-12-13 03:31:47.282464862 +0000 UTC m=+1199.518308408" Dec 13 03:31:47 crc kubenswrapper[5070]: I1213 03:31:47.296462 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=3.193709125 podStartE2EDuration="7.296429284s" podCreationTimestamp="2025-12-13 03:31:40 +0000 UTC" firstStartedPulling="2025-12-13 03:31:41.776518409 +0000 UTC m=+1194.012361955" lastFinishedPulling="2025-12-13 03:31:45.879238568 +0000 UTC m=+1198.115082114" observedRunningTime="2025-12-13 03:31:47.29515572 +0000 UTC m=+1199.530999266" watchObservedRunningTime="2025-12-13 03:31:47.296429284 +0000 UTC m=+1199.532272830" Dec 13 03:31:47 crc kubenswrapper[5070]: I1213 03:31:47.912519 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 13 03:31:47 crc kubenswrapper[5070]: I1213 03:31:47.925541 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1d71c72d-4599-48e7-8c31-63f6968aacb2-logs\") pod \"1d71c72d-4599-48e7-8c31-63f6968aacb2\" (UID: \"1d71c72d-4599-48e7-8c31-63f6968aacb2\") " Dec 13 03:31:47 crc kubenswrapper[5070]: I1213 03:31:47.926076 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d71c72d-4599-48e7-8c31-63f6968aacb2-logs" (OuterVolumeSpecName: "logs") pod "1d71c72d-4599-48e7-8c31-63f6968aacb2" (UID: "1d71c72d-4599-48e7-8c31-63f6968aacb2"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 03:31:47 crc kubenswrapper[5070]: I1213 03:31:47.925611 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1d71c72d-4599-48e7-8c31-63f6968aacb2-combined-ca-bundle\") pod \"1d71c72d-4599-48e7-8c31-63f6968aacb2\" (UID: \"1d71c72d-4599-48e7-8c31-63f6968aacb2\") " Dec 13 03:31:47 crc kubenswrapper[5070]: I1213 03:31:47.926241 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8mx6z\" (UniqueName: \"kubernetes.io/projected/1d71c72d-4599-48e7-8c31-63f6968aacb2-kube-api-access-8mx6z\") pod \"1d71c72d-4599-48e7-8c31-63f6968aacb2\" (UID: \"1d71c72d-4599-48e7-8c31-63f6968aacb2\") " Dec 13 03:31:47 crc kubenswrapper[5070]: I1213 03:31:47.926363 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1d71c72d-4599-48e7-8c31-63f6968aacb2-config-data\") pod \"1d71c72d-4599-48e7-8c31-63f6968aacb2\" (UID: \"1d71c72d-4599-48e7-8c31-63f6968aacb2\") " Dec 13 03:31:47 crc kubenswrapper[5070]: I1213 03:31:47.927027 5070 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1d71c72d-4599-48e7-8c31-63f6968aacb2-logs\") on node \"crc\" DevicePath \"\"" Dec 13 03:31:47 crc kubenswrapper[5070]: I1213 03:31:47.931830 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d71c72d-4599-48e7-8c31-63f6968aacb2-kube-api-access-8mx6z" (OuterVolumeSpecName: "kube-api-access-8mx6z") pod "1d71c72d-4599-48e7-8c31-63f6968aacb2" (UID: "1d71c72d-4599-48e7-8c31-63f6968aacb2"). InnerVolumeSpecName "kube-api-access-8mx6z". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:31:47 crc kubenswrapper[5070]: I1213 03:31:47.959116 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1d71c72d-4599-48e7-8c31-63f6968aacb2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1d71c72d-4599-48e7-8c31-63f6968aacb2" (UID: "1d71c72d-4599-48e7-8c31-63f6968aacb2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:31:47 crc kubenswrapper[5070]: I1213 03:31:47.980676 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1d71c72d-4599-48e7-8c31-63f6968aacb2-config-data" (OuterVolumeSpecName: "config-data") pod "1d71c72d-4599-48e7-8c31-63f6968aacb2" (UID: "1d71c72d-4599-48e7-8c31-63f6968aacb2"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:31:48 crc kubenswrapper[5070]: I1213 03:31:48.030997 5070 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1d71c72d-4599-48e7-8c31-63f6968aacb2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 03:31:48 crc kubenswrapper[5070]: I1213 03:31:48.031286 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8mx6z\" (UniqueName: \"kubernetes.io/projected/1d71c72d-4599-48e7-8c31-63f6968aacb2-kube-api-access-8mx6z\") on node \"crc\" DevicePath \"\"" Dec 13 03:31:48 crc kubenswrapper[5070]: I1213 03:31:48.031376 5070 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1d71c72d-4599-48e7-8c31-63f6968aacb2-config-data\") on node \"crc\" DevicePath \"\"" Dec 13 03:31:48 crc kubenswrapper[5070]: I1213 03:31:48.230322 5070 generic.go:334] "Generic (PLEG): container finished" podID="1d71c72d-4599-48e7-8c31-63f6968aacb2" containerID="f8290b2017126ae714321fdf64b5a9517e31727f96389a5329dc4257a592cecc" exitCode=0 Dec 13 03:31:48 crc kubenswrapper[5070]: I1213 03:31:48.231268 5070 generic.go:334] "Generic (PLEG): container finished" podID="1d71c72d-4599-48e7-8c31-63f6968aacb2" containerID="ab93fed71144f44516b1b80d4eed9867e83ef2434df55848683d378c9d403fab" exitCode=143 Dec 13 03:31:48 crc kubenswrapper[5070]: I1213 03:31:48.230423 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 13 03:31:48 crc kubenswrapper[5070]: I1213 03:31:48.230472 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"1d71c72d-4599-48e7-8c31-63f6968aacb2","Type":"ContainerDied","Data":"f8290b2017126ae714321fdf64b5a9517e31727f96389a5329dc4257a592cecc"} Dec 13 03:31:48 crc kubenswrapper[5070]: I1213 03:31:48.233721 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"1d71c72d-4599-48e7-8c31-63f6968aacb2","Type":"ContainerDied","Data":"ab93fed71144f44516b1b80d4eed9867e83ef2434df55848683d378c9d403fab"} Dec 13 03:31:48 crc kubenswrapper[5070]: I1213 03:31:48.233745 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"1d71c72d-4599-48e7-8c31-63f6968aacb2","Type":"ContainerDied","Data":"a86ed279fdad21653609fc09f712450947d9082e3611c9069cd21b6241a1fa88"} Dec 13 03:31:48 crc kubenswrapper[5070]: I1213 03:31:48.233763 5070 scope.go:117] "RemoveContainer" containerID="f8290b2017126ae714321fdf64b5a9517e31727f96389a5329dc4257a592cecc" Dec 13 03:31:48 crc kubenswrapper[5070]: I1213 03:31:48.266728 5070 scope.go:117] "RemoveContainer" containerID="ab93fed71144f44516b1b80d4eed9867e83ef2434df55848683d378c9d403fab" Dec 13 03:31:48 crc kubenswrapper[5070]: I1213 03:31:48.270872 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 13 03:31:48 crc kubenswrapper[5070]: I1213 03:31:48.284151 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Dec 13 03:31:48 crc kubenswrapper[5070]: I1213 03:31:48.295031 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 13 03:31:48 crc kubenswrapper[5070]: E1213 03:31:48.295661 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1d71c72d-4599-48e7-8c31-63f6968aacb2" containerName="nova-metadata-metadata" Dec 13 03:31:48 crc kubenswrapper[5070]: I1213 03:31:48.295690 5070 
state_mem.go:107] "Deleted CPUSet assignment" podUID="1d71c72d-4599-48e7-8c31-63f6968aacb2" containerName="nova-metadata-metadata" Dec 13 03:31:48 crc kubenswrapper[5070]: E1213 03:31:48.295718 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1d71c72d-4599-48e7-8c31-63f6968aacb2" containerName="nova-metadata-log" Dec 13 03:31:48 crc kubenswrapper[5070]: I1213 03:31:48.295728 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="1d71c72d-4599-48e7-8c31-63f6968aacb2" containerName="nova-metadata-log" Dec 13 03:31:48 crc kubenswrapper[5070]: I1213 03:31:48.295961 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="1d71c72d-4599-48e7-8c31-63f6968aacb2" containerName="nova-metadata-log" Dec 13 03:31:48 crc kubenswrapper[5070]: I1213 03:31:48.295988 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="1d71c72d-4599-48e7-8c31-63f6968aacb2" containerName="nova-metadata-metadata" Dec 13 03:31:48 crc kubenswrapper[5070]: I1213 03:31:48.297012 5070 scope.go:117] "RemoveContainer" containerID="f8290b2017126ae714321fdf64b5a9517e31727f96389a5329dc4257a592cecc" Dec 13 03:31:48 crc kubenswrapper[5070]: I1213 03:31:48.297302 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 13 03:31:48 crc kubenswrapper[5070]: E1213 03:31:48.298914 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f8290b2017126ae714321fdf64b5a9517e31727f96389a5329dc4257a592cecc\": container with ID starting with f8290b2017126ae714321fdf64b5a9517e31727f96389a5329dc4257a592cecc not found: ID does not exist" containerID="f8290b2017126ae714321fdf64b5a9517e31727f96389a5329dc4257a592cecc" Dec 13 03:31:48 crc kubenswrapper[5070]: I1213 03:31:48.298959 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f8290b2017126ae714321fdf64b5a9517e31727f96389a5329dc4257a592cecc"} err="failed to get container status \"f8290b2017126ae714321fdf64b5a9517e31727f96389a5329dc4257a592cecc\": rpc error: code = NotFound desc = could not find container \"f8290b2017126ae714321fdf64b5a9517e31727f96389a5329dc4257a592cecc\": container with ID starting with f8290b2017126ae714321fdf64b5a9517e31727f96389a5329dc4257a592cecc not found: ID does not exist" Dec 13 03:31:48 crc kubenswrapper[5070]: I1213 03:31:48.298988 5070 scope.go:117] "RemoveContainer" containerID="ab93fed71144f44516b1b80d4eed9867e83ef2434df55848683d378c9d403fab" Dec 13 03:31:48 crc kubenswrapper[5070]: E1213 03:31:48.300019 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ab93fed71144f44516b1b80d4eed9867e83ef2434df55848683d378c9d403fab\": container with ID starting with ab93fed71144f44516b1b80d4eed9867e83ef2434df55848683d378c9d403fab not found: ID does not exist" containerID="ab93fed71144f44516b1b80d4eed9867e83ef2434df55848683d378c9d403fab" Dec 13 03:31:48 crc kubenswrapper[5070]: I1213 03:31:48.300056 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ab93fed71144f44516b1b80d4eed9867e83ef2434df55848683d378c9d403fab"} err="failed to get container status \"ab93fed71144f44516b1b80d4eed9867e83ef2434df55848683d378c9d403fab\": rpc error: code = NotFound desc = could not find container \"ab93fed71144f44516b1b80d4eed9867e83ef2434df55848683d378c9d403fab\": container with ID starting with 
ab93fed71144f44516b1b80d4eed9867e83ef2434df55848683d378c9d403fab not found: ID does not exist" Dec 13 03:31:48 crc kubenswrapper[5070]: I1213 03:31:48.300078 5070 scope.go:117] "RemoveContainer" containerID="f8290b2017126ae714321fdf64b5a9517e31727f96389a5329dc4257a592cecc" Dec 13 03:31:48 crc kubenswrapper[5070]: I1213 03:31:48.300084 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 13 03:31:48 crc kubenswrapper[5070]: I1213 03:31:48.300413 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f8290b2017126ae714321fdf64b5a9517e31727f96389a5329dc4257a592cecc"} err="failed to get container status \"f8290b2017126ae714321fdf64b5a9517e31727f96389a5329dc4257a592cecc\": rpc error: code = NotFound desc = could not find container \"f8290b2017126ae714321fdf64b5a9517e31727f96389a5329dc4257a592cecc\": container with ID starting with f8290b2017126ae714321fdf64b5a9517e31727f96389a5329dc4257a592cecc not found: ID does not exist" Dec 13 03:31:48 crc kubenswrapper[5070]: I1213 03:31:48.300463 5070 scope.go:117] "RemoveContainer" containerID="ab93fed71144f44516b1b80d4eed9867e83ef2434df55848683d378c9d403fab" Dec 13 03:31:48 crc kubenswrapper[5070]: I1213 03:31:48.300590 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Dec 13 03:31:48 crc kubenswrapper[5070]: I1213 03:31:48.300916 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ab93fed71144f44516b1b80d4eed9867e83ef2434df55848683d378c9d403fab"} err="failed to get container status \"ab93fed71144f44516b1b80d4eed9867e83ef2434df55848683d378c9d403fab\": rpc error: code = NotFound desc = could not find container \"ab93fed71144f44516b1b80d4eed9867e83ef2434df55848683d378c9d403fab\": container with ID starting with ab93fed71144f44516b1b80d4eed9867e83ef2434df55848683d378c9d403fab not found: ID does not exist" Dec 13 03:31:48 crc kubenswrapper[5070]: I1213 03:31:48.302633 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 13 03:31:48 crc kubenswrapper[5070]: I1213 03:31:48.341190 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94afc769-5904-4c65-bbe1-5ac8e5c12f8e-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"94afc769-5904-4c65-bbe1-5ac8e5c12f8e\") " pod="openstack/nova-metadata-0" Dec 13 03:31:48 crc kubenswrapper[5070]: I1213 03:31:48.341244 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4plgz\" (UniqueName: \"kubernetes.io/projected/94afc769-5904-4c65-bbe1-5ac8e5c12f8e-kube-api-access-4plgz\") pod \"nova-metadata-0\" (UID: \"94afc769-5904-4c65-bbe1-5ac8e5c12f8e\") " pod="openstack/nova-metadata-0" Dec 13 03:31:48 crc kubenswrapper[5070]: I1213 03:31:48.341328 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/94afc769-5904-4c65-bbe1-5ac8e5c12f8e-logs\") pod \"nova-metadata-0\" (UID: \"94afc769-5904-4c65-bbe1-5ac8e5c12f8e\") " pod="openstack/nova-metadata-0" Dec 13 03:31:48 crc kubenswrapper[5070]: I1213 03:31:48.341360 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/94afc769-5904-4c65-bbe1-5ac8e5c12f8e-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"94afc769-5904-4c65-bbe1-5ac8e5c12f8e\") " pod="openstack/nova-metadata-0" Dec 13 03:31:48 crc kubenswrapper[5070]: I1213 03:31:48.341432 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/94afc769-5904-4c65-bbe1-5ac8e5c12f8e-config-data\") pod \"nova-metadata-0\" (UID: \"94afc769-5904-4c65-bbe1-5ac8e5c12f8e\") " pod="openstack/nova-metadata-0" Dec 13 03:31:48 crc kubenswrapper[5070]: I1213 03:31:48.442885 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/94afc769-5904-4c65-bbe1-5ac8e5c12f8e-config-data\") pod \"nova-metadata-0\" (UID: \"94afc769-5904-4c65-bbe1-5ac8e5c12f8e\") " pod="openstack/nova-metadata-0" Dec 13 03:31:48 crc kubenswrapper[5070]: I1213 03:31:48.442993 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94afc769-5904-4c65-bbe1-5ac8e5c12f8e-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"94afc769-5904-4c65-bbe1-5ac8e5c12f8e\") " pod="openstack/nova-metadata-0" Dec 13 03:31:48 crc kubenswrapper[5070]: I1213 03:31:48.443016 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4plgz\" (UniqueName: \"kubernetes.io/projected/94afc769-5904-4c65-bbe1-5ac8e5c12f8e-kube-api-access-4plgz\") pod \"nova-metadata-0\" (UID: \"94afc769-5904-4c65-bbe1-5ac8e5c12f8e\") " pod="openstack/nova-metadata-0" Dec 13 03:31:48 crc kubenswrapper[5070]: I1213 03:31:48.443088 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/94afc769-5904-4c65-bbe1-5ac8e5c12f8e-logs\") pod \"nova-metadata-0\" (UID: \"94afc769-5904-4c65-bbe1-5ac8e5c12f8e\") " pod="openstack/nova-metadata-0" Dec 13 03:31:48 crc kubenswrapper[5070]: I1213 03:31:48.443113 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/94afc769-5904-4c65-bbe1-5ac8e5c12f8e-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"94afc769-5904-4c65-bbe1-5ac8e5c12f8e\") " pod="openstack/nova-metadata-0" Dec 13 03:31:48 crc kubenswrapper[5070]: I1213 03:31:48.443599 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/94afc769-5904-4c65-bbe1-5ac8e5c12f8e-logs\") pod \"nova-metadata-0\" (UID: \"94afc769-5904-4c65-bbe1-5ac8e5c12f8e\") " pod="openstack/nova-metadata-0" Dec 13 03:31:48 crc kubenswrapper[5070]: I1213 03:31:48.461033 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94afc769-5904-4c65-bbe1-5ac8e5c12f8e-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"94afc769-5904-4c65-bbe1-5ac8e5c12f8e\") " pod="openstack/nova-metadata-0" Dec 13 03:31:48 crc kubenswrapper[5070]: I1213 03:31:48.461377 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4plgz\" (UniqueName: \"kubernetes.io/projected/94afc769-5904-4c65-bbe1-5ac8e5c12f8e-kube-api-access-4plgz\") pod \"nova-metadata-0\" (UID: \"94afc769-5904-4c65-bbe1-5ac8e5c12f8e\") " pod="openstack/nova-metadata-0" Dec 13 03:31:48 crc kubenswrapper[5070]: I1213 03:31:48.462519 5070 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/94afc769-5904-4c65-bbe1-5ac8e5c12f8e-config-data\") pod \"nova-metadata-0\" (UID: \"94afc769-5904-4c65-bbe1-5ac8e5c12f8e\") " pod="openstack/nova-metadata-0" Dec 13 03:31:48 crc kubenswrapper[5070]: I1213 03:31:48.465815 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/94afc769-5904-4c65-bbe1-5ac8e5c12f8e-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"94afc769-5904-4c65-bbe1-5ac8e5c12f8e\") " pod="openstack/nova-metadata-0" Dec 13 03:31:48 crc kubenswrapper[5070]: I1213 03:31:48.621895 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 13 03:31:49 crc kubenswrapper[5070]: W1213 03:31:49.167925 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod94afc769_5904_4c65_bbe1_5ac8e5c12f8e.slice/crio-5bf39d8a24161ad4f4759736621258750e71fb00271a3ce9425c36494228b87d WatchSource:0}: Error finding container 5bf39d8a24161ad4f4759736621258750e71fb00271a3ce9425c36494228b87d: Status 404 returned error can't find the container with id 5bf39d8a24161ad4f4759736621258750e71fb00271a3ce9425c36494228b87d Dec 13 03:31:49 crc kubenswrapper[5070]: I1213 03:31:49.185815 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 13 03:31:49 crc kubenswrapper[5070]: I1213 03:31:49.269871 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"94afc769-5904-4c65-bbe1-5ac8e5c12f8e","Type":"ContainerStarted","Data":"5bf39d8a24161ad4f4759736621258750e71fb00271a3ce9425c36494228b87d"} Dec 13 03:31:50 crc kubenswrapper[5070]: I1213 03:31:50.080823 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 13 03:31:50 crc kubenswrapper[5070]: I1213 03:31:50.081793 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="691f47eb-c110-4423-8dac-5515ac7306e0" containerName="kube-state-metrics" containerID="cri-o://e555220c6bd8b607f70338c3f9def7a53af884e6234757b172f7449e6515b719" gracePeriod=30 Dec 13 03:31:50 crc kubenswrapper[5070]: I1213 03:31:50.179912 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d71c72d-4599-48e7-8c31-63f6968aacb2" path="/var/lib/kubelet/pods/1d71c72d-4599-48e7-8c31-63f6968aacb2/volumes" Dec 13 03:31:50 crc kubenswrapper[5070]: I1213 03:31:50.343167 5070 generic.go:334] "Generic (PLEG): container finished" podID="21e9c870-f7e7-4ddf-abbb-56911912f97f" containerID="d6d64e5d266d635a927f5bef7b322688f4a55e133f22f490d3dd1976ed4c5735" exitCode=0 Dec 13 03:31:50 crc kubenswrapper[5070]: I1213 03:31:50.343297 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-x5tqc" event={"ID":"21e9c870-f7e7-4ddf-abbb-56911912f97f","Type":"ContainerDied","Data":"d6d64e5d266d635a927f5bef7b322688f4a55e133f22f490d3dd1976ed4c5735"} Dec 13 03:31:50 crc kubenswrapper[5070]: I1213 03:31:50.359006 5070 generic.go:334] "Generic (PLEG): container finished" podID="691f47eb-c110-4423-8dac-5515ac7306e0" containerID="e555220c6bd8b607f70338c3f9def7a53af884e6234757b172f7449e6515b719" exitCode=2 Dec 13 03:31:50 crc kubenswrapper[5070]: I1213 03:31:50.359115 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" 
event={"ID":"691f47eb-c110-4423-8dac-5515ac7306e0","Type":"ContainerDied","Data":"e555220c6bd8b607f70338c3f9def7a53af884e6234757b172f7449e6515b719"} Dec 13 03:31:50 crc kubenswrapper[5070]: I1213 03:31:50.363894 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"94afc769-5904-4c65-bbe1-5ac8e5c12f8e","Type":"ContainerStarted","Data":"9c17232e85b9c25ebc50819990cee9ac3b2f50ba832ad9bc3d7acca432f6ace6"} Dec 13 03:31:50 crc kubenswrapper[5070]: I1213 03:31:50.363946 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"94afc769-5904-4c65-bbe1-5ac8e5c12f8e","Type":"ContainerStarted","Data":"af4d65045245ad567cdbc271a231d17db12cd9889f014a6c7cf02498fca9f855"} Dec 13 03:31:50 crc kubenswrapper[5070]: I1213 03:31:50.386347 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.386325248 podStartE2EDuration="2.386325248s" podCreationTimestamp="2025-12-13 03:31:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 03:31:50.383457389 +0000 UTC m=+1202.619300945" watchObservedRunningTime="2025-12-13 03:31:50.386325248 +0000 UTC m=+1202.622168794" Dec 13 03:31:50 crc kubenswrapper[5070]: I1213 03:31:50.549403 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 13 03:31:50 crc kubenswrapper[5070]: I1213 03:31:50.582952 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h48x7\" (UniqueName: \"kubernetes.io/projected/691f47eb-c110-4423-8dac-5515ac7306e0-kube-api-access-h48x7\") pod \"691f47eb-c110-4423-8dac-5515ac7306e0\" (UID: \"691f47eb-c110-4423-8dac-5515ac7306e0\") " Dec 13 03:31:50 crc kubenswrapper[5070]: I1213 03:31:50.601420 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/691f47eb-c110-4423-8dac-5515ac7306e0-kube-api-access-h48x7" (OuterVolumeSpecName: "kube-api-access-h48x7") pod "691f47eb-c110-4423-8dac-5515ac7306e0" (UID: "691f47eb-c110-4423-8dac-5515ac7306e0"). InnerVolumeSpecName "kube-api-access-h48x7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:31:50 crc kubenswrapper[5070]: I1213 03:31:50.685179 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h48x7\" (UniqueName: \"kubernetes.io/projected/691f47eb-c110-4423-8dac-5515ac7306e0-kube-api-access-h48x7\") on node \"crc\" DevicePath \"\"" Dec 13 03:31:50 crc kubenswrapper[5070]: I1213 03:31:50.733154 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 13 03:31:50 crc kubenswrapper[5070]: I1213 03:31:50.733193 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 13 03:31:50 crc kubenswrapper[5070]: I1213 03:31:50.842063 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Dec 13 03:31:50 crc kubenswrapper[5070]: I1213 03:31:50.973983 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Dec 13 03:31:50 crc kubenswrapper[5070]: I1213 03:31:50.974081 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Dec 13 03:31:51 crc kubenswrapper[5070]: I1213 03:31:51.009355 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Dec 13 03:31:51 crc kubenswrapper[5070]: I1213 03:31:51.020624 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-8b8cf6657-2j5dx" Dec 13 03:31:51 crc kubenswrapper[5070]: I1213 03:31:51.085137 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-58db5546cc-vk92n"] Dec 13 03:31:51 crc kubenswrapper[5070]: I1213 03:31:51.085361 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-58db5546cc-vk92n" podUID="91cb6c81-3062-4ff8-a34e-50a21bc217c0" containerName="dnsmasq-dns" containerID="cri-o://fa7f9c76a143cdbe28e14ead44fbf2f58d25ffec447929984f0388216f943980" gracePeriod=10 Dec 13 03:31:51 crc kubenswrapper[5070]: I1213 03:31:51.347194 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 13 03:31:51 crc kubenswrapper[5070]: I1213 03:31:51.347533 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="d457f353-5e36-43dc-9f36-c0f55839debb" containerName="ceilometer-central-agent" containerID="cri-o://9f9a8499d936ea71ec5bcd52b48c3975a1447590f157bd6f1e084c78132176e8" gracePeriod=30 Dec 13 03:31:51 crc kubenswrapper[5070]: I1213 03:31:51.347574 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="d457f353-5e36-43dc-9f36-c0f55839debb" containerName="proxy-httpd" containerID="cri-o://d1904df6ad75378a5917e0c5f801f8b9ca8634ef972674d86605364f0c3301a3" gracePeriod=30 Dec 13 03:31:51 crc kubenswrapper[5070]: I1213 03:31:51.347620 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="d457f353-5e36-43dc-9f36-c0f55839debb" containerName="sg-core" containerID="cri-o://2199cf3bcf69b7cd272b3f88f33c6c4ce0c091410b72be806267486e2f07de0f" gracePeriod=30 Dec 13 03:31:51 crc kubenswrapper[5070]: I1213 03:31:51.347637 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="d457f353-5e36-43dc-9f36-c0f55839debb" containerName="ceilometer-notification-agent" 
containerID="cri-o://fef2c6a23a422773cbcd2cfeaede2faf9f30dbddefbf5a3c7acd45d20477a152" gracePeriod=30 Dec 13 03:31:51 crc kubenswrapper[5070]: I1213 03:31:51.378978 5070 generic.go:334] "Generic (PLEG): container finished" podID="91cb6c81-3062-4ff8-a34e-50a21bc217c0" containerID="fa7f9c76a143cdbe28e14ead44fbf2f58d25ffec447929984f0388216f943980" exitCode=0 Dec 13 03:31:51 crc kubenswrapper[5070]: I1213 03:31:51.379054 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-58db5546cc-vk92n" event={"ID":"91cb6c81-3062-4ff8-a34e-50a21bc217c0","Type":"ContainerDied","Data":"fa7f9c76a143cdbe28e14ead44fbf2f58d25ffec447929984f0388216f943980"} Dec 13 03:31:51 crc kubenswrapper[5070]: I1213 03:31:51.381597 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 13 03:31:51 crc kubenswrapper[5070]: I1213 03:31:51.383397 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"691f47eb-c110-4423-8dac-5515ac7306e0","Type":"ContainerDied","Data":"904997c53cc5860d76652e6d733fbbbc420ab9513566cdbf4527da844ece466b"} Dec 13 03:31:51 crc kubenswrapper[5070]: I1213 03:31:51.383464 5070 scope.go:117] "RemoveContainer" containerID="e555220c6bd8b607f70338c3f9def7a53af884e6234757b172f7449e6515b719" Dec 13 03:31:51 crc kubenswrapper[5070]: I1213 03:31:51.428721 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Dec 13 03:31:51 crc kubenswrapper[5070]: I1213 03:31:51.442614 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 13 03:31:51 crc kubenswrapper[5070]: I1213 03:31:51.464382 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 13 03:31:51 crc kubenswrapper[5070]: I1213 03:31:51.482519 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Dec 13 03:31:51 crc kubenswrapper[5070]: E1213 03:31:51.483255 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="691f47eb-c110-4423-8dac-5515ac7306e0" containerName="kube-state-metrics" Dec 13 03:31:51 crc kubenswrapper[5070]: I1213 03:31:51.483271 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="691f47eb-c110-4423-8dac-5515ac7306e0" containerName="kube-state-metrics" Dec 13 03:31:51 crc kubenswrapper[5070]: I1213 03:31:51.483636 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="691f47eb-c110-4423-8dac-5515ac7306e0" containerName="kube-state-metrics" Dec 13 03:31:51 crc kubenswrapper[5070]: I1213 03:31:51.484422 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 13 03:31:51 crc kubenswrapper[5070]: I1213 03:31:51.492799 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 13 03:31:51 crc kubenswrapper[5070]: I1213 03:31:51.499902 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"kube-state-metrics-tls-config" Dec 13 03:31:51 crc kubenswrapper[5070]: I1213 03:31:51.499979 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-kube-state-metrics-svc" Dec 13 03:31:51 crc kubenswrapper[5070]: I1213 03:31:51.504819 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/708b0d41-c778-4856-b02c-895e2c15e3e3-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"708b0d41-c778-4856-b02c-895e2c15e3e3\") " pod="openstack/kube-state-metrics-0" Dec 13 03:31:51 crc kubenswrapper[5070]: I1213 03:31:51.504859 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/708b0d41-c778-4856-b02c-895e2c15e3e3-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"708b0d41-c778-4856-b02c-895e2c15e3e3\") " pod="openstack/kube-state-metrics-0" Dec 13 03:31:51 crc kubenswrapper[5070]: I1213 03:31:51.504889 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/708b0d41-c778-4856-b02c-895e2c15e3e3-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"708b0d41-c778-4856-b02c-895e2c15e3e3\") " pod="openstack/kube-state-metrics-0" Dec 13 03:31:51 crc kubenswrapper[5070]: I1213 03:31:51.504986 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lgzm2\" (UniqueName: \"kubernetes.io/projected/708b0d41-c778-4856-b02c-895e2c15e3e3-kube-api-access-lgzm2\") pod \"kube-state-metrics-0\" (UID: \"708b0d41-c778-4856-b02c-895e2c15e3e3\") " pod="openstack/kube-state-metrics-0" Dec 13 03:31:51 crc kubenswrapper[5070]: I1213 03:31:51.606027 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lgzm2\" (UniqueName: \"kubernetes.io/projected/708b0d41-c778-4856-b02c-895e2c15e3e3-kube-api-access-lgzm2\") pod \"kube-state-metrics-0\" (UID: \"708b0d41-c778-4856-b02c-895e2c15e3e3\") " pod="openstack/kube-state-metrics-0" Dec 13 03:31:51 crc kubenswrapper[5070]: I1213 03:31:51.606140 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/708b0d41-c778-4856-b02c-895e2c15e3e3-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"708b0d41-c778-4856-b02c-895e2c15e3e3\") " pod="openstack/kube-state-metrics-0" Dec 13 03:31:51 crc kubenswrapper[5070]: I1213 03:31:51.606160 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/708b0d41-c778-4856-b02c-895e2c15e3e3-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"708b0d41-c778-4856-b02c-895e2c15e3e3\") " pod="openstack/kube-state-metrics-0" Dec 13 03:31:51 crc kubenswrapper[5070]: I1213 03:31:51.606177 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-config\" 
(UniqueName: \"kubernetes.io/secret/708b0d41-c778-4856-b02c-895e2c15e3e3-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"708b0d41-c778-4856-b02c-895e2c15e3e3\") " pod="openstack/kube-state-metrics-0" Dec 13 03:31:51 crc kubenswrapper[5070]: I1213 03:31:51.615120 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/708b0d41-c778-4856-b02c-895e2c15e3e3-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"708b0d41-c778-4856-b02c-895e2c15e3e3\") " pod="openstack/kube-state-metrics-0" Dec 13 03:31:51 crc kubenswrapper[5070]: I1213 03:31:51.615543 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/708b0d41-c778-4856-b02c-895e2c15e3e3-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"708b0d41-c778-4856-b02c-895e2c15e3e3\") " pod="openstack/kube-state-metrics-0" Dec 13 03:31:51 crc kubenswrapper[5070]: I1213 03:31:51.631066 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/708b0d41-c778-4856-b02c-895e2c15e3e3-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"708b0d41-c778-4856-b02c-895e2c15e3e3\") " pod="openstack/kube-state-metrics-0" Dec 13 03:31:51 crc kubenswrapper[5070]: I1213 03:31:51.633604 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lgzm2\" (UniqueName: \"kubernetes.io/projected/708b0d41-c778-4856-b02c-895e2c15e3e3-kube-api-access-lgzm2\") pod \"kube-state-metrics-0\" (UID: \"708b0d41-c778-4856-b02c-895e2c15e3e3\") " pod="openstack/kube-state-metrics-0" Dec 13 03:31:51 crc kubenswrapper[5070]: I1213 03:31:51.716311 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 13 03:31:51 crc kubenswrapper[5070]: E1213 03:31:51.718555 5070 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1d71c72d_4599_48e7_8c31_63f6968aacb2.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd457f353_5e36_43dc_9f36_c0f55839debb.slice/crio-conmon-2199cf3bcf69b7cd272b3f88f33c6c4ce0c091410b72be806267486e2f07de0f.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod691f47eb_c110_4423_8dac_5515ac7306e0.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd457f353_5e36_43dc_9f36_c0f55839debb.slice/crio-2199cf3bcf69b7cd272b3f88f33c6c4ce0c091410b72be806267486e2f07de0f.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd457f353_5e36_43dc_9f36_c0f55839debb.slice/crio-conmon-d1904df6ad75378a5917e0c5f801f8b9ca8634ef972674d86605364f0c3301a3.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1d71c72d_4599_48e7_8c31_63f6968aacb2.slice/crio-a86ed279fdad21653609fc09f712450947d9082e3611c9069cd21b6241a1fa88\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd457f353_5e36_43dc_9f36_c0f55839debb.slice/crio-d1904df6ad75378a5917e0c5f801f8b9ca8634ef972674d86605364f0c3301a3.scope\": RecentStats: unable to find data in memory cache]" Dec 13 03:31:51 crc kubenswrapper[5070]: I1213 03:31:51.755344 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-58db5546cc-vk92n" Dec 13 03:31:51 crc kubenswrapper[5070]: I1213 03:31:51.808981 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/91cb6c81-3062-4ff8-a34e-50a21bc217c0-ovsdbserver-sb\") pod \"91cb6c81-3062-4ff8-a34e-50a21bc217c0\" (UID: \"91cb6c81-3062-4ff8-a34e-50a21bc217c0\") " Dec 13 03:31:51 crc kubenswrapper[5070]: I1213 03:31:51.809044 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/91cb6c81-3062-4ff8-a34e-50a21bc217c0-config\") pod \"91cb6c81-3062-4ff8-a34e-50a21bc217c0\" (UID: \"91cb6c81-3062-4ff8-a34e-50a21bc217c0\") " Dec 13 03:31:51 crc kubenswrapper[5070]: I1213 03:31:51.809090 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/91cb6c81-3062-4ff8-a34e-50a21bc217c0-ovsdbserver-nb\") pod \"91cb6c81-3062-4ff8-a34e-50a21bc217c0\" (UID: \"91cb6c81-3062-4ff8-a34e-50a21bc217c0\") " Dec 13 03:31:51 crc kubenswrapper[5070]: I1213 03:31:51.809114 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/91cb6c81-3062-4ff8-a34e-50a21bc217c0-dns-svc\") pod \"91cb6c81-3062-4ff8-a34e-50a21bc217c0\" (UID: \"91cb6c81-3062-4ff8-a34e-50a21bc217c0\") " Dec 13 03:31:51 crc kubenswrapper[5070]: I1213 03:31:51.809222 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b82vq\" (UniqueName: \"kubernetes.io/projected/91cb6c81-3062-4ff8-a34e-50a21bc217c0-kube-api-access-b82vq\") pod \"91cb6c81-3062-4ff8-a34e-50a21bc217c0\" (UID: \"91cb6c81-3062-4ff8-a34e-50a21bc217c0\") " Dec 13 03:31:51 crc kubenswrapper[5070]: I1213 03:31:51.821991 5070 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="cc5eda17-9fb5-4dcc-b16a-dddf126fa50e" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.168:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 13 03:31:51 crc kubenswrapper[5070]: I1213 03:31:51.822427 5070 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="cc5eda17-9fb5-4dcc-b16a-dddf126fa50e" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.168:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 13 03:31:51 crc kubenswrapper[5070]: I1213 03:31:51.822907 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/91cb6c81-3062-4ff8-a34e-50a21bc217c0-kube-api-access-b82vq" (OuterVolumeSpecName: "kube-api-access-b82vq") pod "91cb6c81-3062-4ff8-a34e-50a21bc217c0" (UID: "91cb6c81-3062-4ff8-a34e-50a21bc217c0"). InnerVolumeSpecName "kube-api-access-b82vq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:31:51 crc kubenswrapper[5070]: I1213 03:31:51.869996 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/91cb6c81-3062-4ff8-a34e-50a21bc217c0-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "91cb6c81-3062-4ff8-a34e-50a21bc217c0" (UID: "91cb6c81-3062-4ff8-a34e-50a21bc217c0"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:31:51 crc kubenswrapper[5070]: I1213 03:31:51.881161 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/91cb6c81-3062-4ff8-a34e-50a21bc217c0-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "91cb6c81-3062-4ff8-a34e-50a21bc217c0" (UID: "91cb6c81-3062-4ff8-a34e-50a21bc217c0"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:31:51 crc kubenswrapper[5070]: I1213 03:31:51.885845 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/91cb6c81-3062-4ff8-a34e-50a21bc217c0-config" (OuterVolumeSpecName: "config") pod "91cb6c81-3062-4ff8-a34e-50a21bc217c0" (UID: "91cb6c81-3062-4ff8-a34e-50a21bc217c0"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:31:51 crc kubenswrapper[5070]: I1213 03:31:51.888959 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/91cb6c81-3062-4ff8-a34e-50a21bc217c0-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "91cb6c81-3062-4ff8-a34e-50a21bc217c0" (UID: "91cb6c81-3062-4ff8-a34e-50a21bc217c0"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:31:51 crc kubenswrapper[5070]: I1213 03:31:51.896241 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-x5tqc" Dec 13 03:31:51 crc kubenswrapper[5070]: I1213 03:31:51.913534 5070 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/91cb6c81-3062-4ff8-a34e-50a21bc217c0-config\") on node \"crc\" DevicePath \"\"" Dec 13 03:31:51 crc kubenswrapper[5070]: I1213 03:31:51.913564 5070 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/91cb6c81-3062-4ff8-a34e-50a21bc217c0-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 13 03:31:51 crc kubenswrapper[5070]: I1213 03:31:51.913574 5070 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/91cb6c81-3062-4ff8-a34e-50a21bc217c0-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 13 03:31:51 crc kubenswrapper[5070]: I1213 03:31:51.913583 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b82vq\" (UniqueName: \"kubernetes.io/projected/91cb6c81-3062-4ff8-a34e-50a21bc217c0-kube-api-access-b82vq\") on node \"crc\" DevicePath \"\"" Dec 13 03:31:51 crc kubenswrapper[5070]: I1213 03:31:51.913594 5070 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/91cb6c81-3062-4ff8-a34e-50a21bc217c0-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 13 03:31:52 crc kubenswrapper[5070]: I1213 03:31:52.015218 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/21e9c870-f7e7-4ddf-abbb-56911912f97f-config-data\") pod \"21e9c870-f7e7-4ddf-abbb-56911912f97f\" (UID: \"21e9c870-f7e7-4ddf-abbb-56911912f97f\") " Dec 13 03:31:52 crc kubenswrapper[5070]: I1213 03:31:52.015576 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/21e9c870-f7e7-4ddf-abbb-56911912f97f-scripts\") pod \"21e9c870-f7e7-4ddf-abbb-56911912f97f\" (UID: \"21e9c870-f7e7-4ddf-abbb-56911912f97f\") " 
Dec 13 03:31:52 crc kubenswrapper[5070]: I1213 03:31:52.015766 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21e9c870-f7e7-4ddf-abbb-56911912f97f-combined-ca-bundle\") pod \"21e9c870-f7e7-4ddf-abbb-56911912f97f\" (UID: \"21e9c870-f7e7-4ddf-abbb-56911912f97f\") " Dec 13 03:31:52 crc kubenswrapper[5070]: I1213 03:31:52.015969 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gth6w\" (UniqueName: \"kubernetes.io/projected/21e9c870-f7e7-4ddf-abbb-56911912f97f-kube-api-access-gth6w\") pod \"21e9c870-f7e7-4ddf-abbb-56911912f97f\" (UID: \"21e9c870-f7e7-4ddf-abbb-56911912f97f\") " Dec 13 03:31:52 crc kubenswrapper[5070]: I1213 03:31:52.021782 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/21e9c870-f7e7-4ddf-abbb-56911912f97f-scripts" (OuterVolumeSpecName: "scripts") pod "21e9c870-f7e7-4ddf-abbb-56911912f97f" (UID: "21e9c870-f7e7-4ddf-abbb-56911912f97f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:31:52 crc kubenswrapper[5070]: I1213 03:31:52.021849 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/21e9c870-f7e7-4ddf-abbb-56911912f97f-kube-api-access-gth6w" (OuterVolumeSpecName: "kube-api-access-gth6w") pod "21e9c870-f7e7-4ddf-abbb-56911912f97f" (UID: "21e9c870-f7e7-4ddf-abbb-56911912f97f"). InnerVolumeSpecName "kube-api-access-gth6w". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:31:52 crc kubenswrapper[5070]: I1213 03:31:52.048521 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/21e9c870-f7e7-4ddf-abbb-56911912f97f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "21e9c870-f7e7-4ddf-abbb-56911912f97f" (UID: "21e9c870-f7e7-4ddf-abbb-56911912f97f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:31:52 crc kubenswrapper[5070]: I1213 03:31:52.052713 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/21e9c870-f7e7-4ddf-abbb-56911912f97f-config-data" (OuterVolumeSpecName: "config-data") pod "21e9c870-f7e7-4ddf-abbb-56911912f97f" (UID: "21e9c870-f7e7-4ddf-abbb-56911912f97f"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:31:52 crc kubenswrapper[5070]: I1213 03:31:52.124209 5070 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/21e9c870-f7e7-4ddf-abbb-56911912f97f-scripts\") on node \"crc\" DevicePath \"\"" Dec 13 03:31:52 crc kubenswrapper[5070]: I1213 03:31:52.124283 5070 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21e9c870-f7e7-4ddf-abbb-56911912f97f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 03:31:52 crc kubenswrapper[5070]: I1213 03:31:52.124295 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gth6w\" (UniqueName: \"kubernetes.io/projected/21e9c870-f7e7-4ddf-abbb-56911912f97f-kube-api-access-gth6w\") on node \"crc\" DevicePath \"\"" Dec 13 03:31:52 crc kubenswrapper[5070]: I1213 03:31:52.124305 5070 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/21e9c870-f7e7-4ddf-abbb-56911912f97f-config-data\") on node \"crc\" DevicePath \"\"" Dec 13 03:31:52 crc kubenswrapper[5070]: I1213 03:31:52.177605 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="691f47eb-c110-4423-8dac-5515ac7306e0" path="/var/lib/kubelet/pods/691f47eb-c110-4423-8dac-5515ac7306e0/volumes" Dec 13 03:31:52 crc kubenswrapper[5070]: I1213 03:31:52.228136 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 13 03:31:52 crc kubenswrapper[5070]: W1213 03:31:52.232355 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod708b0d41_c778_4856_b02c_895e2c15e3e3.slice/crio-b157d10801b673f8a16f0de86557b8dd924410e136d24743adab92bbf727cd5a WatchSource:0}: Error finding container b157d10801b673f8a16f0de86557b8dd924410e136d24743adab92bbf727cd5a: Status 404 returned error can't find the container with id b157d10801b673f8a16f0de86557b8dd924410e136d24743adab92bbf727cd5a Dec 13 03:31:52 crc kubenswrapper[5070]: I1213 03:31:52.235394 5070 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 13 03:31:52 crc kubenswrapper[5070]: I1213 03:31:52.391254 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"708b0d41-c778-4856-b02c-895e2c15e3e3","Type":"ContainerStarted","Data":"b157d10801b673f8a16f0de86557b8dd924410e136d24743adab92bbf727cd5a"} Dec 13 03:31:52 crc kubenswrapper[5070]: I1213 03:31:52.393920 5070 generic.go:334] "Generic (PLEG): container finished" podID="d457f353-5e36-43dc-9f36-c0f55839debb" containerID="d1904df6ad75378a5917e0c5f801f8b9ca8634ef972674d86605364f0c3301a3" exitCode=0 Dec 13 03:31:52 crc kubenswrapper[5070]: I1213 03:31:52.393956 5070 generic.go:334] "Generic (PLEG): container finished" podID="d457f353-5e36-43dc-9f36-c0f55839debb" containerID="2199cf3bcf69b7cd272b3f88f33c6c4ce0c091410b72be806267486e2f07de0f" exitCode=2 Dec 13 03:31:52 crc kubenswrapper[5070]: I1213 03:31:52.393966 5070 generic.go:334] "Generic (PLEG): container finished" podID="d457f353-5e36-43dc-9f36-c0f55839debb" containerID="9f9a8499d936ea71ec5bcd52b48c3975a1447590f157bd6f1e084c78132176e8" exitCode=0 Dec 13 03:31:52 crc kubenswrapper[5070]: I1213 03:31:52.393985 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"d457f353-5e36-43dc-9f36-c0f55839debb","Type":"ContainerDied","Data":"d1904df6ad75378a5917e0c5f801f8b9ca8634ef972674d86605364f0c3301a3"} Dec 13 03:31:52 crc kubenswrapper[5070]: I1213 03:31:52.394010 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d457f353-5e36-43dc-9f36-c0f55839debb","Type":"ContainerDied","Data":"2199cf3bcf69b7cd272b3f88f33c6c4ce0c091410b72be806267486e2f07de0f"} Dec 13 03:31:52 crc kubenswrapper[5070]: I1213 03:31:52.394023 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d457f353-5e36-43dc-9f36-c0f55839debb","Type":"ContainerDied","Data":"9f9a8499d936ea71ec5bcd52b48c3975a1447590f157bd6f1e084c78132176e8"} Dec 13 03:31:52 crc kubenswrapper[5070]: I1213 03:31:52.395513 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-x5tqc" event={"ID":"21e9c870-f7e7-4ddf-abbb-56911912f97f","Type":"ContainerDied","Data":"288d0bb86a9754544635759486e711a81761b3625a8c773848bf87944a06987e"} Dec 13 03:31:52 crc kubenswrapper[5070]: I1213 03:31:52.395538 5070 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="288d0bb86a9754544635759486e711a81761b3625a8c773848bf87944a06987e" Dec 13 03:31:52 crc kubenswrapper[5070]: I1213 03:31:52.395591 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-x5tqc" Dec 13 03:31:52 crc kubenswrapper[5070]: I1213 03:31:52.397471 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-58db5546cc-vk92n" event={"ID":"91cb6c81-3062-4ff8-a34e-50a21bc217c0","Type":"ContainerDied","Data":"a1e6f1e0209ca6b09ea73728cd4961d1dbf603751e48ec9f87190fb0d2904a0f"} Dec 13 03:31:52 crc kubenswrapper[5070]: I1213 03:31:52.397518 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-58db5546cc-vk92n" Dec 13 03:31:52 crc kubenswrapper[5070]: I1213 03:31:52.397542 5070 scope.go:117] "RemoveContainer" containerID="fa7f9c76a143cdbe28e14ead44fbf2f58d25ffec447929984f0388216f943980" Dec 13 03:31:52 crc kubenswrapper[5070]: I1213 03:31:52.424109 5070 scope.go:117] "RemoveContainer" containerID="a3cb01547ffaeb6d218f14015531c2cce8f36a31801d9800d4b94c131a5f8570" Dec 13 03:31:52 crc kubenswrapper[5070]: I1213 03:31:52.429831 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-58db5546cc-vk92n"] Dec 13 03:31:52 crc kubenswrapper[5070]: I1213 03:31:52.470135 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-58db5546cc-vk92n"] Dec 13 03:31:52 crc kubenswrapper[5070]: I1213 03:31:52.514502 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 13 03:31:52 crc kubenswrapper[5070]: I1213 03:31:52.514818 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="cc5eda17-9fb5-4dcc-b16a-dddf126fa50e" containerName="nova-api-log" containerID="cri-o://bd04c5ca235e71117f52cda60650195408a28b68b79f351225379c96d700b748" gracePeriod=30 Dec 13 03:31:52 crc kubenswrapper[5070]: I1213 03:31:52.515008 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="cc5eda17-9fb5-4dcc-b16a-dddf126fa50e" containerName="nova-api-api" containerID="cri-o://29f3eaa678fca2eb931cae0d0923b2b3741c31e6a94ba558321ac9f40c2552a3" gracePeriod=30 Dec 13 03:31:52 crc kubenswrapper[5070]: I1213 03:31:52.553793 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 13 03:31:52 crc kubenswrapper[5070]: I1213 03:31:52.554015 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="94afc769-5904-4c65-bbe1-5ac8e5c12f8e" containerName="nova-metadata-log" containerID="cri-o://af4d65045245ad567cdbc271a231d17db12cd9889f014a6c7cf02498fca9f855" gracePeriod=30 Dec 13 03:31:52 crc kubenswrapper[5070]: I1213 03:31:52.554116 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="94afc769-5904-4c65-bbe1-5ac8e5c12f8e" containerName="nova-metadata-metadata" containerID="cri-o://9c17232e85b9c25ebc50819990cee9ac3b2f50ba832ad9bc3d7acca432f6ace6" gracePeriod=30 Dec 13 03:31:52 crc kubenswrapper[5070]: I1213 03:31:52.623384 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 13 03:31:53 crc kubenswrapper[5070]: I1213 03:31:53.061063 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 13 03:31:53 crc kubenswrapper[5070]: I1213 03:31:53.138193 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/94afc769-5904-4c65-bbe1-5ac8e5c12f8e-nova-metadata-tls-certs\") pod \"94afc769-5904-4c65-bbe1-5ac8e5c12f8e\" (UID: \"94afc769-5904-4c65-bbe1-5ac8e5c12f8e\") " Dec 13 03:31:53 crc kubenswrapper[5070]: I1213 03:31:53.138279 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4plgz\" (UniqueName: \"kubernetes.io/projected/94afc769-5904-4c65-bbe1-5ac8e5c12f8e-kube-api-access-4plgz\") pod \"94afc769-5904-4c65-bbe1-5ac8e5c12f8e\" (UID: \"94afc769-5904-4c65-bbe1-5ac8e5c12f8e\") " Dec 13 03:31:53 crc kubenswrapper[5070]: I1213 03:31:53.138350 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94afc769-5904-4c65-bbe1-5ac8e5c12f8e-combined-ca-bundle\") pod \"94afc769-5904-4c65-bbe1-5ac8e5c12f8e\" (UID: \"94afc769-5904-4c65-bbe1-5ac8e5c12f8e\") " Dec 13 03:31:53 crc kubenswrapper[5070]: I1213 03:31:53.138462 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/94afc769-5904-4c65-bbe1-5ac8e5c12f8e-logs\") pod \"94afc769-5904-4c65-bbe1-5ac8e5c12f8e\" (UID: \"94afc769-5904-4c65-bbe1-5ac8e5c12f8e\") " Dec 13 03:31:53 crc kubenswrapper[5070]: I1213 03:31:53.138523 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/94afc769-5904-4c65-bbe1-5ac8e5c12f8e-config-data\") pod \"94afc769-5904-4c65-bbe1-5ac8e5c12f8e\" (UID: \"94afc769-5904-4c65-bbe1-5ac8e5c12f8e\") " Dec 13 03:31:53 crc kubenswrapper[5070]: I1213 03:31:53.139523 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/94afc769-5904-4c65-bbe1-5ac8e5c12f8e-logs" (OuterVolumeSpecName: "logs") pod "94afc769-5904-4c65-bbe1-5ac8e5c12f8e" (UID: "94afc769-5904-4c65-bbe1-5ac8e5c12f8e"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 03:31:53 crc kubenswrapper[5070]: I1213 03:31:53.148759 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/94afc769-5904-4c65-bbe1-5ac8e5c12f8e-kube-api-access-4plgz" (OuterVolumeSpecName: "kube-api-access-4plgz") pod "94afc769-5904-4c65-bbe1-5ac8e5c12f8e" (UID: "94afc769-5904-4c65-bbe1-5ac8e5c12f8e"). InnerVolumeSpecName "kube-api-access-4plgz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:31:53 crc kubenswrapper[5070]: I1213 03:31:53.182303 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/94afc769-5904-4c65-bbe1-5ac8e5c12f8e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "94afc769-5904-4c65-bbe1-5ac8e5c12f8e" (UID: "94afc769-5904-4c65-bbe1-5ac8e5c12f8e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:31:53 crc kubenswrapper[5070]: I1213 03:31:53.199795 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/94afc769-5904-4c65-bbe1-5ac8e5c12f8e-config-data" (OuterVolumeSpecName: "config-data") pod "94afc769-5904-4c65-bbe1-5ac8e5c12f8e" (UID: "94afc769-5904-4c65-bbe1-5ac8e5c12f8e"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:31:53 crc kubenswrapper[5070]: I1213 03:31:53.240226 5070 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/94afc769-5904-4c65-bbe1-5ac8e5c12f8e-logs\") on node \"crc\" DevicePath \"\"" Dec 13 03:31:53 crc kubenswrapper[5070]: I1213 03:31:53.240265 5070 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/94afc769-5904-4c65-bbe1-5ac8e5c12f8e-config-data\") on node \"crc\" DevicePath \"\"" Dec 13 03:31:53 crc kubenswrapper[5070]: I1213 03:31:53.243827 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/94afc769-5904-4c65-bbe1-5ac8e5c12f8e-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "94afc769-5904-4c65-bbe1-5ac8e5c12f8e" (UID: "94afc769-5904-4c65-bbe1-5ac8e5c12f8e"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:31:53 crc kubenswrapper[5070]: I1213 03:31:53.240279 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4plgz\" (UniqueName: \"kubernetes.io/projected/94afc769-5904-4c65-bbe1-5ac8e5c12f8e-kube-api-access-4plgz\") on node \"crc\" DevicePath \"\"" Dec 13 03:31:53 crc kubenswrapper[5070]: I1213 03:31:53.244451 5070 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94afc769-5904-4c65-bbe1-5ac8e5c12f8e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 03:31:53 crc kubenswrapper[5070]: I1213 03:31:53.346550 5070 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/94afc769-5904-4c65-bbe1-5ac8e5c12f8e-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 13 03:31:53 crc kubenswrapper[5070]: I1213 03:31:53.409849 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"708b0d41-c778-4856-b02c-895e2c15e3e3","Type":"ContainerStarted","Data":"5002d441daa54793040398d1f74b77aad46617cdd4e584cdb325a606bb9a1fda"} Dec 13 03:31:53 crc kubenswrapper[5070]: I1213 03:31:53.409926 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Dec 13 03:31:53 crc kubenswrapper[5070]: I1213 03:31:53.412315 5070 generic.go:334] "Generic (PLEG): container finished" podID="19a2a29e-7784-4e4f-99bc-dad38cff1b50" containerID="3e361442e6367decca6e454c4bfd5e4caa44d7ea8f7e3176ff123cb097250947" exitCode=0 Dec 13 03:31:53 crc kubenswrapper[5070]: I1213 03:31:53.412392 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-6ltlj" event={"ID":"19a2a29e-7784-4e4f-99bc-dad38cff1b50","Type":"ContainerDied","Data":"3e361442e6367decca6e454c4bfd5e4caa44d7ea8f7e3176ff123cb097250947"} Dec 13 03:31:53 crc kubenswrapper[5070]: I1213 03:31:53.414182 5070 generic.go:334] "Generic (PLEG): container finished" podID="cc5eda17-9fb5-4dcc-b16a-dddf126fa50e" containerID="bd04c5ca235e71117f52cda60650195408a28b68b79f351225379c96d700b748" exitCode=143 Dec 13 03:31:53 crc kubenswrapper[5070]: I1213 03:31:53.414240 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"cc5eda17-9fb5-4dcc-b16a-dddf126fa50e","Type":"ContainerDied","Data":"bd04c5ca235e71117f52cda60650195408a28b68b79f351225379c96d700b748"} Dec 13 03:31:53 crc kubenswrapper[5070]: I1213 03:31:53.420881 5070 generic.go:334] 
"Generic (PLEG): container finished" podID="94afc769-5904-4c65-bbe1-5ac8e5c12f8e" containerID="9c17232e85b9c25ebc50819990cee9ac3b2f50ba832ad9bc3d7acca432f6ace6" exitCode=0 Dec 13 03:31:53 crc kubenswrapper[5070]: I1213 03:31:53.420908 5070 generic.go:334] "Generic (PLEG): container finished" podID="94afc769-5904-4c65-bbe1-5ac8e5c12f8e" containerID="af4d65045245ad567cdbc271a231d17db12cd9889f014a6c7cf02498fca9f855" exitCode=143 Dec 13 03:31:53 crc kubenswrapper[5070]: I1213 03:31:53.420963 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 13 03:31:53 crc kubenswrapper[5070]: I1213 03:31:53.421007 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"94afc769-5904-4c65-bbe1-5ac8e5c12f8e","Type":"ContainerDied","Data":"9c17232e85b9c25ebc50819990cee9ac3b2f50ba832ad9bc3d7acca432f6ace6"} Dec 13 03:31:53 crc kubenswrapper[5070]: I1213 03:31:53.421040 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="3747c2a3-8ef8-44dc-af66-057337b617ee" containerName="nova-scheduler-scheduler" containerID="cri-o://a34216d2a3de0f053012fcb25a16605bc44a33b676e868542e96a41e93ec9a57" gracePeriod=30 Dec 13 03:31:53 crc kubenswrapper[5070]: I1213 03:31:53.421055 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"94afc769-5904-4c65-bbe1-5ac8e5c12f8e","Type":"ContainerDied","Data":"af4d65045245ad567cdbc271a231d17db12cd9889f014a6c7cf02498fca9f855"} Dec 13 03:31:53 crc kubenswrapper[5070]: I1213 03:31:53.421069 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"94afc769-5904-4c65-bbe1-5ac8e5c12f8e","Type":"ContainerDied","Data":"5bf39d8a24161ad4f4759736621258750e71fb00271a3ce9425c36494228b87d"} Dec 13 03:31:53 crc kubenswrapper[5070]: I1213 03:31:53.421087 5070 scope.go:117] "RemoveContainer" containerID="9c17232e85b9c25ebc50819990cee9ac3b2f50ba832ad9bc3d7acca432f6ace6" Dec 13 03:31:53 crc kubenswrapper[5070]: I1213 03:31:53.435116 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=2.074340489 podStartE2EDuration="2.435100917s" podCreationTimestamp="2025-12-13 03:31:51 +0000 UTC" firstStartedPulling="2025-12-13 03:31:52.235137788 +0000 UTC m=+1204.470981334" lastFinishedPulling="2025-12-13 03:31:52.595898216 +0000 UTC m=+1204.831741762" observedRunningTime="2025-12-13 03:31:53.430203204 +0000 UTC m=+1205.666046750" watchObservedRunningTime="2025-12-13 03:31:53.435100917 +0000 UTC m=+1205.670944463" Dec 13 03:31:53 crc kubenswrapper[5070]: I1213 03:31:53.446823 5070 scope.go:117] "RemoveContainer" containerID="af4d65045245ad567cdbc271a231d17db12cd9889f014a6c7cf02498fca9f855" Dec 13 03:31:53 crc kubenswrapper[5070]: I1213 03:31:53.489186 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 13 03:31:53 crc kubenswrapper[5070]: I1213 03:31:53.489814 5070 scope.go:117] "RemoveContainer" containerID="9c17232e85b9c25ebc50819990cee9ac3b2f50ba832ad9bc3d7acca432f6ace6" Dec 13 03:31:53 crc kubenswrapper[5070]: E1213 03:31:53.491525 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9c17232e85b9c25ebc50819990cee9ac3b2f50ba832ad9bc3d7acca432f6ace6\": container with ID starting with 9c17232e85b9c25ebc50819990cee9ac3b2f50ba832ad9bc3d7acca432f6ace6 not found: ID does not 
exist" containerID="9c17232e85b9c25ebc50819990cee9ac3b2f50ba832ad9bc3d7acca432f6ace6" Dec 13 03:31:53 crc kubenswrapper[5070]: I1213 03:31:53.491557 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9c17232e85b9c25ebc50819990cee9ac3b2f50ba832ad9bc3d7acca432f6ace6"} err="failed to get container status \"9c17232e85b9c25ebc50819990cee9ac3b2f50ba832ad9bc3d7acca432f6ace6\": rpc error: code = NotFound desc = could not find container \"9c17232e85b9c25ebc50819990cee9ac3b2f50ba832ad9bc3d7acca432f6ace6\": container with ID starting with 9c17232e85b9c25ebc50819990cee9ac3b2f50ba832ad9bc3d7acca432f6ace6 not found: ID does not exist" Dec 13 03:31:53 crc kubenswrapper[5070]: I1213 03:31:53.491584 5070 scope.go:117] "RemoveContainer" containerID="af4d65045245ad567cdbc271a231d17db12cd9889f014a6c7cf02498fca9f855" Dec 13 03:31:53 crc kubenswrapper[5070]: E1213 03:31:53.492139 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"af4d65045245ad567cdbc271a231d17db12cd9889f014a6c7cf02498fca9f855\": container with ID starting with af4d65045245ad567cdbc271a231d17db12cd9889f014a6c7cf02498fca9f855 not found: ID does not exist" containerID="af4d65045245ad567cdbc271a231d17db12cd9889f014a6c7cf02498fca9f855" Dec 13 03:31:53 crc kubenswrapper[5070]: I1213 03:31:53.492162 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"af4d65045245ad567cdbc271a231d17db12cd9889f014a6c7cf02498fca9f855"} err="failed to get container status \"af4d65045245ad567cdbc271a231d17db12cd9889f014a6c7cf02498fca9f855\": rpc error: code = NotFound desc = could not find container \"af4d65045245ad567cdbc271a231d17db12cd9889f014a6c7cf02498fca9f855\": container with ID starting with af4d65045245ad567cdbc271a231d17db12cd9889f014a6c7cf02498fca9f855 not found: ID does not exist" Dec 13 03:31:53 crc kubenswrapper[5070]: I1213 03:31:53.492176 5070 scope.go:117] "RemoveContainer" containerID="9c17232e85b9c25ebc50819990cee9ac3b2f50ba832ad9bc3d7acca432f6ace6" Dec 13 03:31:53 crc kubenswrapper[5070]: I1213 03:31:53.492579 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9c17232e85b9c25ebc50819990cee9ac3b2f50ba832ad9bc3d7acca432f6ace6"} err="failed to get container status \"9c17232e85b9c25ebc50819990cee9ac3b2f50ba832ad9bc3d7acca432f6ace6\": rpc error: code = NotFound desc = could not find container \"9c17232e85b9c25ebc50819990cee9ac3b2f50ba832ad9bc3d7acca432f6ace6\": container with ID starting with 9c17232e85b9c25ebc50819990cee9ac3b2f50ba832ad9bc3d7acca432f6ace6 not found: ID does not exist" Dec 13 03:31:53 crc kubenswrapper[5070]: I1213 03:31:53.492623 5070 scope.go:117] "RemoveContainer" containerID="af4d65045245ad567cdbc271a231d17db12cd9889f014a6c7cf02498fca9f855" Dec 13 03:31:53 crc kubenswrapper[5070]: I1213 03:31:53.494806 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"af4d65045245ad567cdbc271a231d17db12cd9889f014a6c7cf02498fca9f855"} err="failed to get container status \"af4d65045245ad567cdbc271a231d17db12cd9889f014a6c7cf02498fca9f855\": rpc error: code = NotFound desc = could not find container \"af4d65045245ad567cdbc271a231d17db12cd9889f014a6c7cf02498fca9f855\": container with ID starting with af4d65045245ad567cdbc271a231d17db12cd9889f014a6c7cf02498fca9f855 not found: ID does not exist" Dec 13 03:31:53 crc kubenswrapper[5070]: I1213 03:31:53.500706 5070 kubelet.go:2431] "SyncLoop 
REMOVE" source="api" pods=["openstack/nova-metadata-0"] Dec 13 03:31:53 crc kubenswrapper[5070]: I1213 03:31:53.513177 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 13 03:31:53 crc kubenswrapper[5070]: E1213 03:31:53.513899 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="94afc769-5904-4c65-bbe1-5ac8e5c12f8e" containerName="nova-metadata-metadata" Dec 13 03:31:53 crc kubenswrapper[5070]: I1213 03:31:53.513954 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="94afc769-5904-4c65-bbe1-5ac8e5c12f8e" containerName="nova-metadata-metadata" Dec 13 03:31:53 crc kubenswrapper[5070]: E1213 03:31:53.513976 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="94afc769-5904-4c65-bbe1-5ac8e5c12f8e" containerName="nova-metadata-log" Dec 13 03:31:53 crc kubenswrapper[5070]: I1213 03:31:53.513985 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="94afc769-5904-4c65-bbe1-5ac8e5c12f8e" containerName="nova-metadata-log" Dec 13 03:31:53 crc kubenswrapper[5070]: E1213 03:31:53.513999 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="21e9c870-f7e7-4ddf-abbb-56911912f97f" containerName="nova-manage" Dec 13 03:31:53 crc kubenswrapper[5070]: I1213 03:31:53.514007 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="21e9c870-f7e7-4ddf-abbb-56911912f97f" containerName="nova-manage" Dec 13 03:31:53 crc kubenswrapper[5070]: E1213 03:31:53.514047 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="91cb6c81-3062-4ff8-a34e-50a21bc217c0" containerName="dnsmasq-dns" Dec 13 03:31:53 crc kubenswrapper[5070]: I1213 03:31:53.514056 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="91cb6c81-3062-4ff8-a34e-50a21bc217c0" containerName="dnsmasq-dns" Dec 13 03:31:53 crc kubenswrapper[5070]: E1213 03:31:53.514081 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="91cb6c81-3062-4ff8-a34e-50a21bc217c0" containerName="init" Dec 13 03:31:53 crc kubenswrapper[5070]: I1213 03:31:53.514089 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="91cb6c81-3062-4ff8-a34e-50a21bc217c0" containerName="init" Dec 13 03:31:53 crc kubenswrapper[5070]: I1213 03:31:53.514385 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="21e9c870-f7e7-4ddf-abbb-56911912f97f" containerName="nova-manage" Dec 13 03:31:53 crc kubenswrapper[5070]: I1213 03:31:53.514407 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="94afc769-5904-4c65-bbe1-5ac8e5c12f8e" containerName="nova-metadata-log" Dec 13 03:31:53 crc kubenswrapper[5070]: I1213 03:31:53.514420 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="91cb6c81-3062-4ff8-a34e-50a21bc217c0" containerName="dnsmasq-dns" Dec 13 03:31:53 crc kubenswrapper[5070]: I1213 03:31:53.514453 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="94afc769-5904-4c65-bbe1-5ac8e5c12f8e" containerName="nova-metadata-metadata" Dec 13 03:31:53 crc kubenswrapper[5070]: I1213 03:31:53.515987 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 13 03:31:53 crc kubenswrapper[5070]: I1213 03:31:53.518530 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 13 03:31:53 crc kubenswrapper[5070]: I1213 03:31:53.518804 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Dec 13 03:31:53 crc kubenswrapper[5070]: I1213 03:31:53.523551 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 13 03:31:53 crc kubenswrapper[5070]: I1213 03:31:53.654238 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c574c806-7eda-4898-a3d7-92e5a80f9950-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"c574c806-7eda-4898-a3d7-92e5a80f9950\") " pod="openstack/nova-metadata-0" Dec 13 03:31:53 crc kubenswrapper[5070]: I1213 03:31:53.654954 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/c574c806-7eda-4898-a3d7-92e5a80f9950-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"c574c806-7eda-4898-a3d7-92e5a80f9950\") " pod="openstack/nova-metadata-0" Dec 13 03:31:53 crc kubenswrapper[5070]: I1213 03:31:53.655257 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c574c806-7eda-4898-a3d7-92e5a80f9950-logs\") pod \"nova-metadata-0\" (UID: \"c574c806-7eda-4898-a3d7-92e5a80f9950\") " pod="openstack/nova-metadata-0" Dec 13 03:31:53 crc kubenswrapper[5070]: I1213 03:31:53.655347 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sxmmd\" (UniqueName: \"kubernetes.io/projected/c574c806-7eda-4898-a3d7-92e5a80f9950-kube-api-access-sxmmd\") pod \"nova-metadata-0\" (UID: \"c574c806-7eda-4898-a3d7-92e5a80f9950\") " pod="openstack/nova-metadata-0" Dec 13 03:31:53 crc kubenswrapper[5070]: I1213 03:31:53.655381 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c574c806-7eda-4898-a3d7-92e5a80f9950-config-data\") pod \"nova-metadata-0\" (UID: \"c574c806-7eda-4898-a3d7-92e5a80f9950\") " pod="openstack/nova-metadata-0" Dec 13 03:31:53 crc kubenswrapper[5070]: I1213 03:31:53.756737 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/c574c806-7eda-4898-a3d7-92e5a80f9950-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"c574c806-7eda-4898-a3d7-92e5a80f9950\") " pod="openstack/nova-metadata-0" Dec 13 03:31:53 crc kubenswrapper[5070]: I1213 03:31:53.756817 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c574c806-7eda-4898-a3d7-92e5a80f9950-logs\") pod \"nova-metadata-0\" (UID: \"c574c806-7eda-4898-a3d7-92e5a80f9950\") " pod="openstack/nova-metadata-0" Dec 13 03:31:53 crc kubenswrapper[5070]: I1213 03:31:53.756865 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sxmmd\" (UniqueName: \"kubernetes.io/projected/c574c806-7eda-4898-a3d7-92e5a80f9950-kube-api-access-sxmmd\") pod \"nova-metadata-0\" (UID: \"c574c806-7eda-4898-a3d7-92e5a80f9950\") " 
pod="openstack/nova-metadata-0" Dec 13 03:31:53 crc kubenswrapper[5070]: I1213 03:31:53.756890 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c574c806-7eda-4898-a3d7-92e5a80f9950-config-data\") pod \"nova-metadata-0\" (UID: \"c574c806-7eda-4898-a3d7-92e5a80f9950\") " pod="openstack/nova-metadata-0" Dec 13 03:31:53 crc kubenswrapper[5070]: I1213 03:31:53.756972 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c574c806-7eda-4898-a3d7-92e5a80f9950-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"c574c806-7eda-4898-a3d7-92e5a80f9950\") " pod="openstack/nova-metadata-0" Dec 13 03:31:53 crc kubenswrapper[5070]: I1213 03:31:53.757273 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c574c806-7eda-4898-a3d7-92e5a80f9950-logs\") pod \"nova-metadata-0\" (UID: \"c574c806-7eda-4898-a3d7-92e5a80f9950\") " pod="openstack/nova-metadata-0" Dec 13 03:31:53 crc kubenswrapper[5070]: I1213 03:31:53.760576 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c574c806-7eda-4898-a3d7-92e5a80f9950-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"c574c806-7eda-4898-a3d7-92e5a80f9950\") " pod="openstack/nova-metadata-0" Dec 13 03:31:53 crc kubenswrapper[5070]: I1213 03:31:53.760750 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c574c806-7eda-4898-a3d7-92e5a80f9950-config-data\") pod \"nova-metadata-0\" (UID: \"c574c806-7eda-4898-a3d7-92e5a80f9950\") " pod="openstack/nova-metadata-0" Dec 13 03:31:53 crc kubenswrapper[5070]: I1213 03:31:53.761002 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/c574c806-7eda-4898-a3d7-92e5a80f9950-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"c574c806-7eda-4898-a3d7-92e5a80f9950\") " pod="openstack/nova-metadata-0" Dec 13 03:31:53 crc kubenswrapper[5070]: I1213 03:31:53.775969 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sxmmd\" (UniqueName: \"kubernetes.io/projected/c574c806-7eda-4898-a3d7-92e5a80f9950-kube-api-access-sxmmd\") pod \"nova-metadata-0\" (UID: \"c574c806-7eda-4898-a3d7-92e5a80f9950\") " pod="openstack/nova-metadata-0" Dec 13 03:31:53 crc kubenswrapper[5070]: I1213 03:31:53.862229 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 13 03:31:54 crc kubenswrapper[5070]: I1213 03:31:54.183471 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="91cb6c81-3062-4ff8-a34e-50a21bc217c0" path="/var/lib/kubelet/pods/91cb6c81-3062-4ff8-a34e-50a21bc217c0/volumes" Dec 13 03:31:54 crc kubenswrapper[5070]: I1213 03:31:54.184062 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="94afc769-5904-4c65-bbe1-5ac8e5c12f8e" path="/var/lib/kubelet/pods/94afc769-5904-4c65-bbe1-5ac8e5c12f8e/volumes" Dec 13 03:31:54 crc kubenswrapper[5070]: I1213 03:31:54.443436 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 13 03:31:54 crc kubenswrapper[5070]: I1213 03:31:54.446790 5070 generic.go:334] "Generic (PLEG): container finished" podID="3747c2a3-8ef8-44dc-af66-057337b617ee" containerID="a34216d2a3de0f053012fcb25a16605bc44a33b676e868542e96a41e93ec9a57" exitCode=0 Dec 13 03:31:54 crc kubenswrapper[5070]: I1213 03:31:54.446839 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"3747c2a3-8ef8-44dc-af66-057337b617ee","Type":"ContainerDied","Data":"a34216d2a3de0f053012fcb25a16605bc44a33b676e868542e96a41e93ec9a57"} Dec 13 03:31:54 crc kubenswrapper[5070]: I1213 03:31:54.722623 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 13 03:31:54 crc kubenswrapper[5070]: I1213 03:31:54.737873 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-6ltlj" Dec 13 03:31:54 crc kubenswrapper[5070]: I1213 03:31:54.829141 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3747c2a3-8ef8-44dc-af66-057337b617ee-combined-ca-bundle\") pod \"3747c2a3-8ef8-44dc-af66-057337b617ee\" (UID: \"3747c2a3-8ef8-44dc-af66-057337b617ee\") " Dec 13 03:31:54 crc kubenswrapper[5070]: I1213 03:31:54.829312 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3747c2a3-8ef8-44dc-af66-057337b617ee-config-data\") pod \"3747c2a3-8ef8-44dc-af66-057337b617ee\" (UID: \"3747c2a3-8ef8-44dc-af66-057337b617ee\") " Dec 13 03:31:54 crc kubenswrapper[5070]: I1213 03:31:54.829474 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tsvtw\" (UniqueName: \"kubernetes.io/projected/3747c2a3-8ef8-44dc-af66-057337b617ee-kube-api-access-tsvtw\") pod \"3747c2a3-8ef8-44dc-af66-057337b617ee\" (UID: \"3747c2a3-8ef8-44dc-af66-057337b617ee\") " Dec 13 03:31:54 crc kubenswrapper[5070]: I1213 03:31:54.845656 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3747c2a3-8ef8-44dc-af66-057337b617ee-kube-api-access-tsvtw" (OuterVolumeSpecName: "kube-api-access-tsvtw") pod "3747c2a3-8ef8-44dc-af66-057337b617ee" (UID: "3747c2a3-8ef8-44dc-af66-057337b617ee"). InnerVolumeSpecName "kube-api-access-tsvtw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:31:54 crc kubenswrapper[5070]: I1213 03:31:54.857654 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3747c2a3-8ef8-44dc-af66-057337b617ee-config-data" (OuterVolumeSpecName: "config-data") pod "3747c2a3-8ef8-44dc-af66-057337b617ee" (UID: "3747c2a3-8ef8-44dc-af66-057337b617ee"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:31:54 crc kubenswrapper[5070]: I1213 03:31:54.860808 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3747c2a3-8ef8-44dc-af66-057337b617ee-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3747c2a3-8ef8-44dc-af66-057337b617ee" (UID: "3747c2a3-8ef8-44dc-af66-057337b617ee"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:31:54 crc kubenswrapper[5070]: I1213 03:31:54.931673 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/19a2a29e-7784-4e4f-99bc-dad38cff1b50-combined-ca-bundle\") pod \"19a2a29e-7784-4e4f-99bc-dad38cff1b50\" (UID: \"19a2a29e-7784-4e4f-99bc-dad38cff1b50\") " Dec 13 03:31:54 crc kubenswrapper[5070]: I1213 03:31:54.931762 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bkwm6\" (UniqueName: \"kubernetes.io/projected/19a2a29e-7784-4e4f-99bc-dad38cff1b50-kube-api-access-bkwm6\") pod \"19a2a29e-7784-4e4f-99bc-dad38cff1b50\" (UID: \"19a2a29e-7784-4e4f-99bc-dad38cff1b50\") " Dec 13 03:31:54 crc kubenswrapper[5070]: I1213 03:31:54.931867 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/19a2a29e-7784-4e4f-99bc-dad38cff1b50-scripts\") pod \"19a2a29e-7784-4e4f-99bc-dad38cff1b50\" (UID: \"19a2a29e-7784-4e4f-99bc-dad38cff1b50\") " Dec 13 03:31:54 crc kubenswrapper[5070]: I1213 03:31:54.931916 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/19a2a29e-7784-4e4f-99bc-dad38cff1b50-config-data\") pod \"19a2a29e-7784-4e4f-99bc-dad38cff1b50\" (UID: \"19a2a29e-7784-4e4f-99bc-dad38cff1b50\") " Dec 13 03:31:54 crc kubenswrapper[5070]: I1213 03:31:54.932299 5070 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3747c2a3-8ef8-44dc-af66-057337b617ee-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 03:31:54 crc kubenswrapper[5070]: I1213 03:31:54.932315 5070 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3747c2a3-8ef8-44dc-af66-057337b617ee-config-data\") on node \"crc\" DevicePath \"\"" Dec 13 03:31:54 crc kubenswrapper[5070]: I1213 03:31:54.932323 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tsvtw\" (UniqueName: \"kubernetes.io/projected/3747c2a3-8ef8-44dc-af66-057337b617ee-kube-api-access-tsvtw\") on node \"crc\" DevicePath \"\"" Dec 13 03:31:54 crc kubenswrapper[5070]: I1213 03:31:54.935727 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/19a2a29e-7784-4e4f-99bc-dad38cff1b50-scripts" (OuterVolumeSpecName: "scripts") pod "19a2a29e-7784-4e4f-99bc-dad38cff1b50" (UID: "19a2a29e-7784-4e4f-99bc-dad38cff1b50"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:31:54 crc kubenswrapper[5070]: I1213 03:31:54.935820 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/19a2a29e-7784-4e4f-99bc-dad38cff1b50-kube-api-access-bkwm6" (OuterVolumeSpecName: "kube-api-access-bkwm6") pod "19a2a29e-7784-4e4f-99bc-dad38cff1b50" (UID: "19a2a29e-7784-4e4f-99bc-dad38cff1b50"). InnerVolumeSpecName "kube-api-access-bkwm6". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:31:54 crc kubenswrapper[5070]: I1213 03:31:54.954794 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/19a2a29e-7784-4e4f-99bc-dad38cff1b50-config-data" (OuterVolumeSpecName: "config-data") pod "19a2a29e-7784-4e4f-99bc-dad38cff1b50" (UID: "19a2a29e-7784-4e4f-99bc-dad38cff1b50"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:31:54 crc kubenswrapper[5070]: I1213 03:31:54.955347 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/19a2a29e-7784-4e4f-99bc-dad38cff1b50-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "19a2a29e-7784-4e4f-99bc-dad38cff1b50" (UID: "19a2a29e-7784-4e4f-99bc-dad38cff1b50"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:31:55 crc kubenswrapper[5070]: I1213 03:31:55.034539 5070 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/19a2a29e-7784-4e4f-99bc-dad38cff1b50-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 03:31:55 crc kubenswrapper[5070]: I1213 03:31:55.034571 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bkwm6\" (UniqueName: \"kubernetes.io/projected/19a2a29e-7784-4e4f-99bc-dad38cff1b50-kube-api-access-bkwm6\") on node \"crc\" DevicePath \"\"" Dec 13 03:31:55 crc kubenswrapper[5070]: I1213 03:31:55.034580 5070 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/19a2a29e-7784-4e4f-99bc-dad38cff1b50-scripts\") on node \"crc\" DevicePath \"\"" Dec 13 03:31:55 crc kubenswrapper[5070]: I1213 03:31:55.034588 5070 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/19a2a29e-7784-4e4f-99bc-dad38cff1b50-config-data\") on node \"crc\" DevicePath \"\"" Dec 13 03:31:55 crc kubenswrapper[5070]: I1213 03:31:55.463953 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"c574c806-7eda-4898-a3d7-92e5a80f9950","Type":"ContainerStarted","Data":"c51073fd26d422ecab3a1776e9a7baa263de6d8ffc5084a6968c64056239c644"} Dec 13 03:31:55 crc kubenswrapper[5070]: I1213 03:31:55.464304 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"c574c806-7eda-4898-a3d7-92e5a80f9950","Type":"ContainerStarted","Data":"63e108499b04dc6fbe6906f6d963a0986409cd78dbccccdac7b83c64d0edfa9f"} Dec 13 03:31:55 crc kubenswrapper[5070]: I1213 03:31:55.464318 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"c574c806-7eda-4898-a3d7-92e5a80f9950","Type":"ContainerStarted","Data":"9127d612297935ce4cc289c62c2e187ee51993a428882923ec353f85b2bab2e5"} Dec 13 03:31:55 crc kubenswrapper[5070]: I1213 03:31:55.466769 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"3747c2a3-8ef8-44dc-af66-057337b617ee","Type":"ContainerDied","Data":"1d255ffb622799e84593756418c470fd6fb730213567f5ebeea5b603c07463da"} Dec 13 03:31:55 crc kubenswrapper[5070]: I1213 03:31:55.466793 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 13 03:31:55 crc kubenswrapper[5070]: I1213 03:31:55.466822 5070 scope.go:117] "RemoveContainer" containerID="a34216d2a3de0f053012fcb25a16605bc44a33b676e868542e96a41e93ec9a57" Dec 13 03:31:55 crc kubenswrapper[5070]: I1213 03:31:55.475216 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-6ltlj" event={"ID":"19a2a29e-7784-4e4f-99bc-dad38cff1b50","Type":"ContainerDied","Data":"1e106d376292fcac22c6c9b84f3b0bae05d8e92db7864b24bd51746d723ddfe8"} Dec 13 03:31:55 crc kubenswrapper[5070]: I1213 03:31:55.475284 5070 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1e106d376292fcac22c6c9b84f3b0bae05d8e92db7864b24bd51746d723ddfe8" Dec 13 03:31:55 crc kubenswrapper[5070]: I1213 03:31:55.475297 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-6ltlj" Dec 13 03:31:55 crc kubenswrapper[5070]: I1213 03:31:55.515541 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.515419904 podStartE2EDuration="2.515419904s" podCreationTimestamp="2025-12-13 03:31:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 03:31:55.495857939 +0000 UTC m=+1207.731701526" watchObservedRunningTime="2025-12-13 03:31:55.515419904 +0000 UTC m=+1207.751263450" Dec 13 03:31:55 crc kubenswrapper[5070]: I1213 03:31:55.541777 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 13 03:31:55 crc kubenswrapper[5070]: I1213 03:31:55.558866 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Dec 13 03:31:55 crc kubenswrapper[5070]: I1213 03:31:55.577201 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 13 03:31:55 crc kubenswrapper[5070]: E1213 03:31:55.577717 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="19a2a29e-7784-4e4f-99bc-dad38cff1b50" containerName="nova-cell1-conductor-db-sync" Dec 13 03:31:55 crc kubenswrapper[5070]: I1213 03:31:55.577744 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="19a2a29e-7784-4e4f-99bc-dad38cff1b50" containerName="nova-cell1-conductor-db-sync" Dec 13 03:31:55 crc kubenswrapper[5070]: E1213 03:31:55.577764 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3747c2a3-8ef8-44dc-af66-057337b617ee" containerName="nova-scheduler-scheduler" Dec 13 03:31:55 crc kubenswrapper[5070]: I1213 03:31:55.577773 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="3747c2a3-8ef8-44dc-af66-057337b617ee" containerName="nova-scheduler-scheduler" Dec 13 03:31:55 crc kubenswrapper[5070]: I1213 03:31:55.578009 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="19a2a29e-7784-4e4f-99bc-dad38cff1b50" containerName="nova-cell1-conductor-db-sync" Dec 13 03:31:55 crc kubenswrapper[5070]: I1213 03:31:55.578036 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="3747c2a3-8ef8-44dc-af66-057337b617ee" containerName="nova-scheduler-scheduler" Dec 13 03:31:55 crc kubenswrapper[5070]: I1213 03:31:55.578862 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Dec 13 03:31:55 crc kubenswrapper[5070]: I1213 03:31:55.583609 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Dec 13 03:31:55 crc kubenswrapper[5070]: I1213 03:31:55.587858 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 13 03:31:55 crc kubenswrapper[5070]: I1213 03:31:55.618554 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Dec 13 03:31:55 crc kubenswrapper[5070]: I1213 03:31:55.619787 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 13 03:31:55 crc kubenswrapper[5070]: I1213 03:31:55.622094 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Dec 13 03:31:55 crc kubenswrapper[5070]: I1213 03:31:55.631736 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 13 03:31:55 crc kubenswrapper[5070]: I1213 03:31:55.749649 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e71ec192-d2c9-4dab-9063-5a6639ecb927-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"e71ec192-d2c9-4dab-9063-5a6639ecb927\") " pod="openstack/nova-cell1-conductor-0" Dec 13 03:31:55 crc kubenswrapper[5070]: I1213 03:31:55.750052 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9e4432dc-3e78-4733-9bfd-4ecb88a9d10f-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"9e4432dc-3e78-4733-9bfd-4ecb88a9d10f\") " pod="openstack/nova-scheduler-0" Dec 13 03:31:55 crc kubenswrapper[5070]: I1213 03:31:55.750219 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9e4432dc-3e78-4733-9bfd-4ecb88a9d10f-config-data\") pod \"nova-scheduler-0\" (UID: \"9e4432dc-3e78-4733-9bfd-4ecb88a9d10f\") " pod="openstack/nova-scheduler-0" Dec 13 03:31:55 crc kubenswrapper[5070]: I1213 03:31:55.750331 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e71ec192-d2c9-4dab-9063-5a6639ecb927-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"e71ec192-d2c9-4dab-9063-5a6639ecb927\") " pod="openstack/nova-cell1-conductor-0" Dec 13 03:31:55 crc kubenswrapper[5070]: I1213 03:31:55.750465 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p5gtk\" (UniqueName: \"kubernetes.io/projected/e71ec192-d2c9-4dab-9063-5a6639ecb927-kube-api-access-p5gtk\") pod \"nova-cell1-conductor-0\" (UID: \"e71ec192-d2c9-4dab-9063-5a6639ecb927\") " pod="openstack/nova-cell1-conductor-0" Dec 13 03:31:55 crc kubenswrapper[5070]: I1213 03:31:55.750605 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mn544\" (UniqueName: \"kubernetes.io/projected/9e4432dc-3e78-4733-9bfd-4ecb88a9d10f-kube-api-access-mn544\") pod \"nova-scheduler-0\" (UID: \"9e4432dc-3e78-4733-9bfd-4ecb88a9d10f\") " pod="openstack/nova-scheduler-0" Dec 13 03:31:55 crc kubenswrapper[5070]: I1213 03:31:55.852640 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"config-data\" (UniqueName: \"kubernetes.io/secret/9e4432dc-3e78-4733-9bfd-4ecb88a9d10f-config-data\") pod \"nova-scheduler-0\" (UID: \"9e4432dc-3e78-4733-9bfd-4ecb88a9d10f\") " pod="openstack/nova-scheduler-0" Dec 13 03:31:55 crc kubenswrapper[5070]: I1213 03:31:55.852687 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e71ec192-d2c9-4dab-9063-5a6639ecb927-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"e71ec192-d2c9-4dab-9063-5a6639ecb927\") " pod="openstack/nova-cell1-conductor-0" Dec 13 03:31:55 crc kubenswrapper[5070]: I1213 03:31:55.852723 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p5gtk\" (UniqueName: \"kubernetes.io/projected/e71ec192-d2c9-4dab-9063-5a6639ecb927-kube-api-access-p5gtk\") pod \"nova-cell1-conductor-0\" (UID: \"e71ec192-d2c9-4dab-9063-5a6639ecb927\") " pod="openstack/nova-cell1-conductor-0" Dec 13 03:31:55 crc kubenswrapper[5070]: I1213 03:31:55.852754 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mn544\" (UniqueName: \"kubernetes.io/projected/9e4432dc-3e78-4733-9bfd-4ecb88a9d10f-kube-api-access-mn544\") pod \"nova-scheduler-0\" (UID: \"9e4432dc-3e78-4733-9bfd-4ecb88a9d10f\") " pod="openstack/nova-scheduler-0" Dec 13 03:31:55 crc kubenswrapper[5070]: I1213 03:31:55.852873 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e71ec192-d2c9-4dab-9063-5a6639ecb927-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"e71ec192-d2c9-4dab-9063-5a6639ecb927\") " pod="openstack/nova-cell1-conductor-0" Dec 13 03:31:55 crc kubenswrapper[5070]: I1213 03:31:55.852935 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9e4432dc-3e78-4733-9bfd-4ecb88a9d10f-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"9e4432dc-3e78-4733-9bfd-4ecb88a9d10f\") " pod="openstack/nova-scheduler-0" Dec 13 03:31:55 crc kubenswrapper[5070]: I1213 03:31:55.858504 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e71ec192-d2c9-4dab-9063-5a6639ecb927-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"e71ec192-d2c9-4dab-9063-5a6639ecb927\") " pod="openstack/nova-cell1-conductor-0" Dec 13 03:31:55 crc kubenswrapper[5070]: I1213 03:31:55.862006 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9e4432dc-3e78-4733-9bfd-4ecb88a9d10f-config-data\") pod \"nova-scheduler-0\" (UID: \"9e4432dc-3e78-4733-9bfd-4ecb88a9d10f\") " pod="openstack/nova-scheduler-0" Dec 13 03:31:55 crc kubenswrapper[5070]: I1213 03:31:55.871763 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9e4432dc-3e78-4733-9bfd-4ecb88a9d10f-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"9e4432dc-3e78-4733-9bfd-4ecb88a9d10f\") " pod="openstack/nova-scheduler-0" Dec 13 03:31:55 crc kubenswrapper[5070]: I1213 03:31:55.873466 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p5gtk\" (UniqueName: \"kubernetes.io/projected/e71ec192-d2c9-4dab-9063-5a6639ecb927-kube-api-access-p5gtk\") pod \"nova-cell1-conductor-0\" (UID: \"e71ec192-d2c9-4dab-9063-5a6639ecb927\") " 
pod="openstack/nova-cell1-conductor-0" Dec 13 03:31:55 crc kubenswrapper[5070]: I1213 03:31:55.874938 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mn544\" (UniqueName: \"kubernetes.io/projected/9e4432dc-3e78-4733-9bfd-4ecb88a9d10f-kube-api-access-mn544\") pod \"nova-scheduler-0\" (UID: \"9e4432dc-3e78-4733-9bfd-4ecb88a9d10f\") " pod="openstack/nova-scheduler-0" Dec 13 03:31:55 crc kubenswrapper[5070]: I1213 03:31:55.885726 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e71ec192-d2c9-4dab-9063-5a6639ecb927-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"e71ec192-d2c9-4dab-9063-5a6639ecb927\") " pod="openstack/nova-cell1-conductor-0" Dec 13 03:31:55 crc kubenswrapper[5070]: I1213 03:31:55.896976 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Dec 13 03:31:55 crc kubenswrapper[5070]: I1213 03:31:55.939208 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 13 03:31:56 crc kubenswrapper[5070]: I1213 03:31:56.064365 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 13 03:31:56 crc kubenswrapper[5070]: I1213 03:31:56.177631 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3747c2a3-8ef8-44dc-af66-057337b617ee" path="/var/lib/kubelet/pods/3747c2a3-8ef8-44dc-af66-057337b617ee/volumes" Dec 13 03:31:56 crc kubenswrapper[5070]: I1213 03:31:56.264563 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d457f353-5e36-43dc-9f36-c0f55839debb-config-data\") pod \"d457f353-5e36-43dc-9f36-c0f55839debb\" (UID: \"d457f353-5e36-43dc-9f36-c0f55839debb\") " Dec 13 03:31:56 crc kubenswrapper[5070]: I1213 03:31:56.264634 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rfbt7\" (UniqueName: \"kubernetes.io/projected/d457f353-5e36-43dc-9f36-c0f55839debb-kube-api-access-rfbt7\") pod \"d457f353-5e36-43dc-9f36-c0f55839debb\" (UID: \"d457f353-5e36-43dc-9f36-c0f55839debb\") " Dec 13 03:31:56 crc kubenswrapper[5070]: I1213 03:31:56.264693 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d457f353-5e36-43dc-9f36-c0f55839debb-combined-ca-bundle\") pod \"d457f353-5e36-43dc-9f36-c0f55839debb\" (UID: \"d457f353-5e36-43dc-9f36-c0f55839debb\") " Dec 13 03:31:56 crc kubenswrapper[5070]: I1213 03:31:56.264720 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d457f353-5e36-43dc-9f36-c0f55839debb-sg-core-conf-yaml\") pod \"d457f353-5e36-43dc-9f36-c0f55839debb\" (UID: \"d457f353-5e36-43dc-9f36-c0f55839debb\") " Dec 13 03:31:56 crc kubenswrapper[5070]: I1213 03:31:56.264757 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d457f353-5e36-43dc-9f36-c0f55839debb-run-httpd\") pod \"d457f353-5e36-43dc-9f36-c0f55839debb\" (UID: \"d457f353-5e36-43dc-9f36-c0f55839debb\") " Dec 13 03:31:56 crc kubenswrapper[5070]: I1213 03:31:56.264836 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/d457f353-5e36-43dc-9f36-c0f55839debb-log-httpd\") pod \"d457f353-5e36-43dc-9f36-c0f55839debb\" (UID: \"d457f353-5e36-43dc-9f36-c0f55839debb\") " Dec 13 03:31:56 crc kubenswrapper[5070]: I1213 03:31:56.264888 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d457f353-5e36-43dc-9f36-c0f55839debb-scripts\") pod \"d457f353-5e36-43dc-9f36-c0f55839debb\" (UID: \"d457f353-5e36-43dc-9f36-c0f55839debb\") " Dec 13 03:31:56 crc kubenswrapper[5070]: I1213 03:31:56.265750 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d457f353-5e36-43dc-9f36-c0f55839debb-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "d457f353-5e36-43dc-9f36-c0f55839debb" (UID: "d457f353-5e36-43dc-9f36-c0f55839debb"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 03:31:56 crc kubenswrapper[5070]: I1213 03:31:56.266175 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d457f353-5e36-43dc-9f36-c0f55839debb-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "d457f353-5e36-43dc-9f36-c0f55839debb" (UID: "d457f353-5e36-43dc-9f36-c0f55839debb"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 03:31:56 crc kubenswrapper[5070]: I1213 03:31:56.269514 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d457f353-5e36-43dc-9f36-c0f55839debb-kube-api-access-rfbt7" (OuterVolumeSpecName: "kube-api-access-rfbt7") pod "d457f353-5e36-43dc-9f36-c0f55839debb" (UID: "d457f353-5e36-43dc-9f36-c0f55839debb"). InnerVolumeSpecName "kube-api-access-rfbt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:31:56 crc kubenswrapper[5070]: I1213 03:31:56.271762 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d457f353-5e36-43dc-9f36-c0f55839debb-scripts" (OuterVolumeSpecName: "scripts") pod "d457f353-5e36-43dc-9f36-c0f55839debb" (UID: "d457f353-5e36-43dc-9f36-c0f55839debb"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:31:56 crc kubenswrapper[5070]: I1213 03:31:56.294975 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d457f353-5e36-43dc-9f36-c0f55839debb-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "d457f353-5e36-43dc-9f36-c0f55839debb" (UID: "d457f353-5e36-43dc-9f36-c0f55839debb"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:31:56 crc kubenswrapper[5070]: I1213 03:31:56.368246 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rfbt7\" (UniqueName: \"kubernetes.io/projected/d457f353-5e36-43dc-9f36-c0f55839debb-kube-api-access-rfbt7\") on node \"crc\" DevicePath \"\"" Dec 13 03:31:56 crc kubenswrapper[5070]: I1213 03:31:56.368571 5070 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d457f353-5e36-43dc-9f36-c0f55839debb-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 13 03:31:56 crc kubenswrapper[5070]: I1213 03:31:56.368584 5070 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d457f353-5e36-43dc-9f36-c0f55839debb-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 13 03:31:56 crc kubenswrapper[5070]: I1213 03:31:56.368595 5070 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d457f353-5e36-43dc-9f36-c0f55839debb-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 13 03:31:56 crc kubenswrapper[5070]: I1213 03:31:56.368606 5070 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d457f353-5e36-43dc-9f36-c0f55839debb-scripts\") on node \"crc\" DevicePath \"\"" Dec 13 03:31:56 crc kubenswrapper[5070]: I1213 03:31:56.373736 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d457f353-5e36-43dc-9f36-c0f55839debb-config-data" (OuterVolumeSpecName: "config-data") pod "d457f353-5e36-43dc-9f36-c0f55839debb" (UID: "d457f353-5e36-43dc-9f36-c0f55839debb"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:31:56 crc kubenswrapper[5070]: I1213 03:31:56.382995 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d457f353-5e36-43dc-9f36-c0f55839debb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d457f353-5e36-43dc-9f36-c0f55839debb" (UID: "d457f353-5e36-43dc-9f36-c0f55839debb"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:31:56 crc kubenswrapper[5070]: I1213 03:31:56.387975 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 13 03:31:56 crc kubenswrapper[5070]: I1213 03:31:56.459288 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 13 03:31:56 crc kubenswrapper[5070]: I1213 03:31:56.470347 5070 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d457f353-5e36-43dc-9f36-c0f55839debb-config-data\") on node \"crc\" DevicePath \"\"" Dec 13 03:31:56 crc kubenswrapper[5070]: I1213 03:31:56.470382 5070 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d457f353-5e36-43dc-9f36-c0f55839debb-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 03:31:56 crc kubenswrapper[5070]: I1213 03:31:56.484808 5070 generic.go:334] "Generic (PLEG): container finished" podID="d457f353-5e36-43dc-9f36-c0f55839debb" containerID="fef2c6a23a422773cbcd2cfeaede2faf9f30dbddefbf5a3c7acd45d20477a152" exitCode=0 Dec 13 03:31:56 crc kubenswrapper[5070]: I1213 03:31:56.484894 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d457f353-5e36-43dc-9f36-c0f55839debb","Type":"ContainerDied","Data":"fef2c6a23a422773cbcd2cfeaede2faf9f30dbddefbf5a3c7acd45d20477a152"} Dec 13 03:31:56 crc kubenswrapper[5070]: I1213 03:31:56.484933 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d457f353-5e36-43dc-9f36-c0f55839debb","Type":"ContainerDied","Data":"12614ed54ac4877f96fb912c123abfb7bde6f1c1c103da958815363dbb669d5d"} Dec 13 03:31:56 crc kubenswrapper[5070]: I1213 03:31:56.484953 5070 scope.go:117] "RemoveContainer" containerID="d1904df6ad75378a5917e0c5f801f8b9ca8634ef972674d86605364f0c3301a3" Dec 13 03:31:56 crc kubenswrapper[5070]: I1213 03:31:56.485097 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 13 03:31:56 crc kubenswrapper[5070]: I1213 03:31:56.488014 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"e71ec192-d2c9-4dab-9063-5a6639ecb927","Type":"ContainerStarted","Data":"ff950e845b879b954dd5fd32b4f45752272842ca5cf1406deb1adb5316774422"} Dec 13 03:31:56 crc kubenswrapper[5070]: I1213 03:31:56.489010 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"9e4432dc-3e78-4733-9bfd-4ecb88a9d10f","Type":"ContainerStarted","Data":"59418cac85b4bbe1361814f05c85f40f650c6015a7eca1fbd96bd8bfd165eadc"} Dec 13 03:31:56 crc kubenswrapper[5070]: I1213 03:31:56.512005 5070 scope.go:117] "RemoveContainer" containerID="2199cf3bcf69b7cd272b3f88f33c6c4ce0c091410b72be806267486e2f07de0f" Dec 13 03:31:56 crc kubenswrapper[5070]: I1213 03:31:56.519424 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 13 03:31:56 crc kubenswrapper[5070]: I1213 03:31:56.529018 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 13 03:31:56 crc kubenswrapper[5070]: I1213 03:31:56.540208 5070 scope.go:117] "RemoveContainer" containerID="fef2c6a23a422773cbcd2cfeaede2faf9f30dbddefbf5a3c7acd45d20477a152" Dec 13 03:31:56 crc kubenswrapper[5070]: I1213 03:31:56.544477 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 13 03:31:56 crc kubenswrapper[5070]: E1213 03:31:56.544922 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d457f353-5e36-43dc-9f36-c0f55839debb" containerName="sg-core" Dec 13 03:31:56 crc kubenswrapper[5070]: I1213 03:31:56.545305 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="d457f353-5e36-43dc-9f36-c0f55839debb" containerName="sg-core" Dec 13 03:31:56 crc kubenswrapper[5070]: E1213 03:31:56.545380 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d457f353-5e36-43dc-9f36-c0f55839debb" containerName="ceilometer-central-agent" Dec 13 03:31:56 crc kubenswrapper[5070]: I1213 03:31:56.545433 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="d457f353-5e36-43dc-9f36-c0f55839debb" containerName="ceilometer-central-agent" Dec 13 03:31:56 crc kubenswrapper[5070]: E1213 03:31:56.545535 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d457f353-5e36-43dc-9f36-c0f55839debb" containerName="ceilometer-notification-agent" Dec 13 03:31:56 crc kubenswrapper[5070]: I1213 03:31:56.545598 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="d457f353-5e36-43dc-9f36-c0f55839debb" containerName="ceilometer-notification-agent" Dec 13 03:31:56 crc kubenswrapper[5070]: E1213 03:31:56.545654 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d457f353-5e36-43dc-9f36-c0f55839debb" containerName="proxy-httpd" Dec 13 03:31:56 crc kubenswrapper[5070]: I1213 03:31:56.545704 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="d457f353-5e36-43dc-9f36-c0f55839debb" containerName="proxy-httpd" Dec 13 03:31:56 crc kubenswrapper[5070]: I1213 03:31:56.545913 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="d457f353-5e36-43dc-9f36-c0f55839debb" containerName="sg-core" Dec 13 03:31:56 crc kubenswrapper[5070]: I1213 03:31:56.545979 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="d457f353-5e36-43dc-9f36-c0f55839debb" containerName="ceilometer-central-agent" Dec 13 03:31:56 crc kubenswrapper[5070]: I1213 03:31:56.546047 5070 
memory_manager.go:354] "RemoveStaleState removing state" podUID="d457f353-5e36-43dc-9f36-c0f55839debb" containerName="proxy-httpd" Dec 13 03:31:56 crc kubenswrapper[5070]: I1213 03:31:56.546140 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="d457f353-5e36-43dc-9f36-c0f55839debb" containerName="ceilometer-notification-agent" Dec 13 03:31:56 crc kubenswrapper[5070]: I1213 03:31:56.549166 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 13 03:31:56 crc kubenswrapper[5070]: I1213 03:31:56.552136 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 13 03:31:56 crc kubenswrapper[5070]: I1213 03:31:56.552353 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Dec 13 03:31:56 crc kubenswrapper[5070]: I1213 03:31:56.552458 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 13 03:31:56 crc kubenswrapper[5070]: I1213 03:31:56.580037 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 13 03:31:56 crc kubenswrapper[5070]: I1213 03:31:56.603789 5070 scope.go:117] "RemoveContainer" containerID="9f9a8499d936ea71ec5bcd52b48c3975a1447590f157bd6f1e084c78132176e8" Dec 13 03:31:56 crc kubenswrapper[5070]: I1213 03:31:56.639061 5070 scope.go:117] "RemoveContainer" containerID="d1904df6ad75378a5917e0c5f801f8b9ca8634ef972674d86605364f0c3301a3" Dec 13 03:31:56 crc kubenswrapper[5070]: E1213 03:31:56.639813 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d1904df6ad75378a5917e0c5f801f8b9ca8634ef972674d86605364f0c3301a3\": container with ID starting with d1904df6ad75378a5917e0c5f801f8b9ca8634ef972674d86605364f0c3301a3 not found: ID does not exist" containerID="d1904df6ad75378a5917e0c5f801f8b9ca8634ef972674d86605364f0c3301a3" Dec 13 03:31:56 crc kubenswrapper[5070]: I1213 03:31:56.639925 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d1904df6ad75378a5917e0c5f801f8b9ca8634ef972674d86605364f0c3301a3"} err="failed to get container status \"d1904df6ad75378a5917e0c5f801f8b9ca8634ef972674d86605364f0c3301a3\": rpc error: code = NotFound desc = could not find container \"d1904df6ad75378a5917e0c5f801f8b9ca8634ef972674d86605364f0c3301a3\": container with ID starting with d1904df6ad75378a5917e0c5f801f8b9ca8634ef972674d86605364f0c3301a3 not found: ID does not exist" Dec 13 03:31:56 crc kubenswrapper[5070]: I1213 03:31:56.640018 5070 scope.go:117] "RemoveContainer" containerID="2199cf3bcf69b7cd272b3f88f33c6c4ce0c091410b72be806267486e2f07de0f" Dec 13 03:31:56 crc kubenswrapper[5070]: E1213 03:31:56.640594 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2199cf3bcf69b7cd272b3f88f33c6c4ce0c091410b72be806267486e2f07de0f\": container with ID starting with 2199cf3bcf69b7cd272b3f88f33c6c4ce0c091410b72be806267486e2f07de0f not found: ID does not exist" containerID="2199cf3bcf69b7cd272b3f88f33c6c4ce0c091410b72be806267486e2f07de0f" Dec 13 03:31:56 crc kubenswrapper[5070]: I1213 03:31:56.640688 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2199cf3bcf69b7cd272b3f88f33c6c4ce0c091410b72be806267486e2f07de0f"} err="failed to get container status 
\"2199cf3bcf69b7cd272b3f88f33c6c4ce0c091410b72be806267486e2f07de0f\": rpc error: code = NotFound desc = could not find container \"2199cf3bcf69b7cd272b3f88f33c6c4ce0c091410b72be806267486e2f07de0f\": container with ID starting with 2199cf3bcf69b7cd272b3f88f33c6c4ce0c091410b72be806267486e2f07de0f not found: ID does not exist" Dec 13 03:31:56 crc kubenswrapper[5070]: I1213 03:31:56.640732 5070 scope.go:117] "RemoveContainer" containerID="fef2c6a23a422773cbcd2cfeaede2faf9f30dbddefbf5a3c7acd45d20477a152" Dec 13 03:31:56 crc kubenswrapper[5070]: E1213 03:31:56.640982 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fef2c6a23a422773cbcd2cfeaede2faf9f30dbddefbf5a3c7acd45d20477a152\": container with ID starting with fef2c6a23a422773cbcd2cfeaede2faf9f30dbddefbf5a3c7acd45d20477a152 not found: ID does not exist" containerID="fef2c6a23a422773cbcd2cfeaede2faf9f30dbddefbf5a3c7acd45d20477a152" Dec 13 03:31:56 crc kubenswrapper[5070]: I1213 03:31:56.641001 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fef2c6a23a422773cbcd2cfeaede2faf9f30dbddefbf5a3c7acd45d20477a152"} err="failed to get container status \"fef2c6a23a422773cbcd2cfeaede2faf9f30dbddefbf5a3c7acd45d20477a152\": rpc error: code = NotFound desc = could not find container \"fef2c6a23a422773cbcd2cfeaede2faf9f30dbddefbf5a3c7acd45d20477a152\": container with ID starting with fef2c6a23a422773cbcd2cfeaede2faf9f30dbddefbf5a3c7acd45d20477a152 not found: ID does not exist" Dec 13 03:31:56 crc kubenswrapper[5070]: I1213 03:31:56.641015 5070 scope.go:117] "RemoveContainer" containerID="9f9a8499d936ea71ec5bcd52b48c3975a1447590f157bd6f1e084c78132176e8" Dec 13 03:31:56 crc kubenswrapper[5070]: E1213 03:31:56.641234 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9f9a8499d936ea71ec5bcd52b48c3975a1447590f157bd6f1e084c78132176e8\": container with ID starting with 9f9a8499d936ea71ec5bcd52b48c3975a1447590f157bd6f1e084c78132176e8 not found: ID does not exist" containerID="9f9a8499d936ea71ec5bcd52b48c3975a1447590f157bd6f1e084c78132176e8" Dec 13 03:31:56 crc kubenswrapper[5070]: I1213 03:31:56.641253 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9f9a8499d936ea71ec5bcd52b48c3975a1447590f157bd6f1e084c78132176e8"} err="failed to get container status \"9f9a8499d936ea71ec5bcd52b48c3975a1447590f157bd6f1e084c78132176e8\": rpc error: code = NotFound desc = could not find container \"9f9a8499d936ea71ec5bcd52b48c3975a1447590f157bd6f1e084c78132176e8\": container with ID starting with 9f9a8499d936ea71ec5bcd52b48c3975a1447590f157bd6f1e084c78132176e8 not found: ID does not exist" Dec 13 03:31:56 crc kubenswrapper[5070]: I1213 03:31:56.676578 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/100c82c7-ad01-4aa0-82bd-ddca864ebae3-log-httpd\") pod \"ceilometer-0\" (UID: \"100c82c7-ad01-4aa0-82bd-ddca864ebae3\") " pod="openstack/ceilometer-0" Dec 13 03:31:56 crc kubenswrapper[5070]: I1213 03:31:56.676678 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/100c82c7-ad01-4aa0-82bd-ddca864ebae3-config-data\") pod \"ceilometer-0\" (UID: \"100c82c7-ad01-4aa0-82bd-ddca864ebae3\") " pod="openstack/ceilometer-0" Dec 13 03:31:56 crc 
kubenswrapper[5070]: I1213 03:31:56.676722 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t28tv\" (UniqueName: \"kubernetes.io/projected/100c82c7-ad01-4aa0-82bd-ddca864ebae3-kube-api-access-t28tv\") pod \"ceilometer-0\" (UID: \"100c82c7-ad01-4aa0-82bd-ddca864ebae3\") " pod="openstack/ceilometer-0" Dec 13 03:31:56 crc kubenswrapper[5070]: I1213 03:31:56.676919 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/100c82c7-ad01-4aa0-82bd-ddca864ebae3-run-httpd\") pod \"ceilometer-0\" (UID: \"100c82c7-ad01-4aa0-82bd-ddca864ebae3\") " pod="openstack/ceilometer-0" Dec 13 03:31:56 crc kubenswrapper[5070]: I1213 03:31:56.677100 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/100c82c7-ad01-4aa0-82bd-ddca864ebae3-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"100c82c7-ad01-4aa0-82bd-ddca864ebae3\") " pod="openstack/ceilometer-0" Dec 13 03:31:56 crc kubenswrapper[5070]: I1213 03:31:56.677172 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/100c82c7-ad01-4aa0-82bd-ddca864ebae3-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"100c82c7-ad01-4aa0-82bd-ddca864ebae3\") " pod="openstack/ceilometer-0" Dec 13 03:31:56 crc kubenswrapper[5070]: I1213 03:31:56.677217 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/100c82c7-ad01-4aa0-82bd-ddca864ebae3-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"100c82c7-ad01-4aa0-82bd-ddca864ebae3\") " pod="openstack/ceilometer-0" Dec 13 03:31:56 crc kubenswrapper[5070]: I1213 03:31:56.677477 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/100c82c7-ad01-4aa0-82bd-ddca864ebae3-scripts\") pod \"ceilometer-0\" (UID: \"100c82c7-ad01-4aa0-82bd-ddca864ebae3\") " pod="openstack/ceilometer-0" Dec 13 03:31:56 crc kubenswrapper[5070]: I1213 03:31:56.779089 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/100c82c7-ad01-4aa0-82bd-ddca864ebae3-config-data\") pod \"ceilometer-0\" (UID: \"100c82c7-ad01-4aa0-82bd-ddca864ebae3\") " pod="openstack/ceilometer-0" Dec 13 03:31:56 crc kubenswrapper[5070]: I1213 03:31:56.779471 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t28tv\" (UniqueName: \"kubernetes.io/projected/100c82c7-ad01-4aa0-82bd-ddca864ebae3-kube-api-access-t28tv\") pod \"ceilometer-0\" (UID: \"100c82c7-ad01-4aa0-82bd-ddca864ebae3\") " pod="openstack/ceilometer-0" Dec 13 03:31:56 crc kubenswrapper[5070]: I1213 03:31:56.779511 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/100c82c7-ad01-4aa0-82bd-ddca864ebae3-run-httpd\") pod \"ceilometer-0\" (UID: \"100c82c7-ad01-4aa0-82bd-ddca864ebae3\") " pod="openstack/ceilometer-0" Dec 13 03:31:56 crc kubenswrapper[5070]: I1213 03:31:56.779566 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/100c82c7-ad01-4aa0-82bd-ddca864ebae3-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"100c82c7-ad01-4aa0-82bd-ddca864ebae3\") " pod="openstack/ceilometer-0" Dec 13 03:31:56 crc kubenswrapper[5070]: I1213 03:31:56.779606 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/100c82c7-ad01-4aa0-82bd-ddca864ebae3-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"100c82c7-ad01-4aa0-82bd-ddca864ebae3\") " pod="openstack/ceilometer-0" Dec 13 03:31:56 crc kubenswrapper[5070]: I1213 03:31:56.779629 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/100c82c7-ad01-4aa0-82bd-ddca864ebae3-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"100c82c7-ad01-4aa0-82bd-ddca864ebae3\") " pod="openstack/ceilometer-0" Dec 13 03:31:56 crc kubenswrapper[5070]: I1213 03:31:56.779684 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/100c82c7-ad01-4aa0-82bd-ddca864ebae3-scripts\") pod \"ceilometer-0\" (UID: \"100c82c7-ad01-4aa0-82bd-ddca864ebae3\") " pod="openstack/ceilometer-0" Dec 13 03:31:56 crc kubenswrapper[5070]: I1213 03:31:56.779756 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/100c82c7-ad01-4aa0-82bd-ddca864ebae3-log-httpd\") pod \"ceilometer-0\" (UID: \"100c82c7-ad01-4aa0-82bd-ddca864ebae3\") " pod="openstack/ceilometer-0" Dec 13 03:31:56 crc kubenswrapper[5070]: I1213 03:31:56.780304 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/100c82c7-ad01-4aa0-82bd-ddca864ebae3-log-httpd\") pod \"ceilometer-0\" (UID: \"100c82c7-ad01-4aa0-82bd-ddca864ebae3\") " pod="openstack/ceilometer-0" Dec 13 03:31:56 crc kubenswrapper[5070]: I1213 03:31:56.782052 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/100c82c7-ad01-4aa0-82bd-ddca864ebae3-run-httpd\") pod \"ceilometer-0\" (UID: \"100c82c7-ad01-4aa0-82bd-ddca864ebae3\") " pod="openstack/ceilometer-0" Dec 13 03:31:56 crc kubenswrapper[5070]: I1213 03:31:56.790031 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/100c82c7-ad01-4aa0-82bd-ddca864ebae3-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"100c82c7-ad01-4aa0-82bd-ddca864ebae3\") " pod="openstack/ceilometer-0" Dec 13 03:31:56 crc kubenswrapper[5070]: I1213 03:31:56.790349 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/100c82c7-ad01-4aa0-82bd-ddca864ebae3-config-data\") pod \"ceilometer-0\" (UID: \"100c82c7-ad01-4aa0-82bd-ddca864ebae3\") " pod="openstack/ceilometer-0" Dec 13 03:31:56 crc kubenswrapper[5070]: I1213 03:31:56.791017 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/100c82c7-ad01-4aa0-82bd-ddca864ebae3-scripts\") pod \"ceilometer-0\" (UID: \"100c82c7-ad01-4aa0-82bd-ddca864ebae3\") " pod="openstack/ceilometer-0" Dec 13 03:31:56 crc kubenswrapper[5070]: I1213 03:31:56.791340 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/100c82c7-ad01-4aa0-82bd-ddca864ebae3-ceilometer-tls-certs\") 
pod \"ceilometer-0\" (UID: \"100c82c7-ad01-4aa0-82bd-ddca864ebae3\") " pod="openstack/ceilometer-0" Dec 13 03:31:56 crc kubenswrapper[5070]: I1213 03:31:56.791715 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/100c82c7-ad01-4aa0-82bd-ddca864ebae3-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"100c82c7-ad01-4aa0-82bd-ddca864ebae3\") " pod="openstack/ceilometer-0" Dec 13 03:31:56 crc kubenswrapper[5070]: I1213 03:31:56.796931 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t28tv\" (UniqueName: \"kubernetes.io/projected/100c82c7-ad01-4aa0-82bd-ddca864ebae3-kube-api-access-t28tv\") pod \"ceilometer-0\" (UID: \"100c82c7-ad01-4aa0-82bd-ddca864ebae3\") " pod="openstack/ceilometer-0" Dec 13 03:31:56 crc kubenswrapper[5070]: I1213 03:31:56.881672 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 13 03:31:57 crc kubenswrapper[5070]: I1213 03:31:57.317318 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 13 03:31:57 crc kubenswrapper[5070]: I1213 03:31:57.500345 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"100c82c7-ad01-4aa0-82bd-ddca864ebae3","Type":"ContainerStarted","Data":"931622c9a6816c042df664abd46ec225d1574f5edd77533edbfe8ef477692f3a"} Dec 13 03:31:57 crc kubenswrapper[5070]: I1213 03:31:57.501687 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"e71ec192-d2c9-4dab-9063-5a6639ecb927","Type":"ContainerStarted","Data":"11f15ee71aead6e124056994b1d9e4c58ed1f091db0cfc6047b61931d49a1300"} Dec 13 03:31:57 crc kubenswrapper[5070]: I1213 03:31:57.502747 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Dec 13 03:31:57 crc kubenswrapper[5070]: I1213 03:31:57.504243 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"9e4432dc-3e78-4733-9bfd-4ecb88a9d10f","Type":"ContainerStarted","Data":"ae46931758d5573f0f2b088f2b02e634f877291a3e5a95c0e80f3046c28818e7"} Dec 13 03:31:57 crc kubenswrapper[5070]: I1213 03:31:57.520272 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=2.520257958 podStartE2EDuration="2.520257958s" podCreationTimestamp="2025-12-13 03:31:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 03:31:57.513997836 +0000 UTC m=+1209.749841382" watchObservedRunningTime="2025-12-13 03:31:57.520257958 +0000 UTC m=+1209.756101504" Dec 13 03:31:57 crc kubenswrapper[5070]: I1213 03:31:57.531586 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.531565947 podStartE2EDuration="2.531565947s" podCreationTimestamp="2025-12-13 03:31:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 03:31:57.530385914 +0000 UTC m=+1209.766229470" watchObservedRunningTime="2025-12-13 03:31:57.531565947 +0000 UTC m=+1209.767409503" Dec 13 03:31:58 crc kubenswrapper[5070]: I1213 03:31:58.178591 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d457f353-5e36-43dc-9f36-c0f55839debb" 
path="/var/lib/kubelet/pods/d457f353-5e36-43dc-9f36-c0f55839debb/volumes" Dec 13 03:31:58 crc kubenswrapper[5070]: I1213 03:31:58.462201 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 13 03:31:58 crc kubenswrapper[5070]: I1213 03:31:58.514627 5070 generic.go:334] "Generic (PLEG): container finished" podID="cc5eda17-9fb5-4dcc-b16a-dddf126fa50e" containerID="29f3eaa678fca2eb931cae0d0923b2b3741c31e6a94ba558321ac9f40c2552a3" exitCode=0 Dec 13 03:31:58 crc kubenswrapper[5070]: I1213 03:31:58.514698 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"cc5eda17-9fb5-4dcc-b16a-dddf126fa50e","Type":"ContainerDied","Data":"29f3eaa678fca2eb931cae0d0923b2b3741c31e6a94ba558321ac9f40c2552a3"} Dec 13 03:31:58 crc kubenswrapper[5070]: I1213 03:31:58.514724 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"cc5eda17-9fb5-4dcc-b16a-dddf126fa50e","Type":"ContainerDied","Data":"15109f439df4098b78716053f2ce159e2218f7c6ed43aaf6f13001f1376cff0f"} Dec 13 03:31:58 crc kubenswrapper[5070]: I1213 03:31:58.514742 5070 scope.go:117] "RemoveContainer" containerID="29f3eaa678fca2eb931cae0d0923b2b3741c31e6a94ba558321ac9f40c2552a3" Dec 13 03:31:58 crc kubenswrapper[5070]: I1213 03:31:58.515591 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 13 03:31:58 crc kubenswrapper[5070]: I1213 03:31:58.517999 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"100c82c7-ad01-4aa0-82bd-ddca864ebae3","Type":"ContainerStarted","Data":"8947da473a129a723485267e60fb8a98ec1dcb2919f0714de331a865321259dd"} Dec 13 03:31:58 crc kubenswrapper[5070]: I1213 03:31:58.540067 5070 scope.go:117] "RemoveContainer" containerID="bd04c5ca235e71117f52cda60650195408a28b68b79f351225379c96d700b748" Dec 13 03:31:58 crc kubenswrapper[5070]: I1213 03:31:58.555348 5070 scope.go:117] "RemoveContainer" containerID="29f3eaa678fca2eb931cae0d0923b2b3741c31e6a94ba558321ac9f40c2552a3" Dec 13 03:31:58 crc kubenswrapper[5070]: E1213 03:31:58.555953 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"29f3eaa678fca2eb931cae0d0923b2b3741c31e6a94ba558321ac9f40c2552a3\": container with ID starting with 29f3eaa678fca2eb931cae0d0923b2b3741c31e6a94ba558321ac9f40c2552a3 not found: ID does not exist" containerID="29f3eaa678fca2eb931cae0d0923b2b3741c31e6a94ba558321ac9f40c2552a3" Dec 13 03:31:58 crc kubenswrapper[5070]: I1213 03:31:58.556012 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"29f3eaa678fca2eb931cae0d0923b2b3741c31e6a94ba558321ac9f40c2552a3"} err="failed to get container status \"29f3eaa678fca2eb931cae0d0923b2b3741c31e6a94ba558321ac9f40c2552a3\": rpc error: code = NotFound desc = could not find container \"29f3eaa678fca2eb931cae0d0923b2b3741c31e6a94ba558321ac9f40c2552a3\": container with ID starting with 29f3eaa678fca2eb931cae0d0923b2b3741c31e6a94ba558321ac9f40c2552a3 not found: ID does not exist" Dec 13 03:31:58 crc kubenswrapper[5070]: I1213 03:31:58.556263 5070 scope.go:117] "RemoveContainer" containerID="bd04c5ca235e71117f52cda60650195408a28b68b79f351225379c96d700b748" Dec 13 03:31:58 crc kubenswrapper[5070]: E1213 03:31:58.557059 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"bd04c5ca235e71117f52cda60650195408a28b68b79f351225379c96d700b748\": container with ID starting with bd04c5ca235e71117f52cda60650195408a28b68b79f351225379c96d700b748 not found: ID does not exist" containerID="bd04c5ca235e71117f52cda60650195408a28b68b79f351225379c96d700b748" Dec 13 03:31:58 crc kubenswrapper[5070]: I1213 03:31:58.557092 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bd04c5ca235e71117f52cda60650195408a28b68b79f351225379c96d700b748"} err="failed to get container status \"bd04c5ca235e71117f52cda60650195408a28b68b79f351225379c96d700b748\": rpc error: code = NotFound desc = could not find container \"bd04c5ca235e71117f52cda60650195408a28b68b79f351225379c96d700b748\": container with ID starting with bd04c5ca235e71117f52cda60650195408a28b68b79f351225379c96d700b748 not found: ID does not exist" Dec 13 03:31:58 crc kubenswrapper[5070]: I1213 03:31:58.613999 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7cwcc\" (UniqueName: \"kubernetes.io/projected/cc5eda17-9fb5-4dcc-b16a-dddf126fa50e-kube-api-access-7cwcc\") pod \"cc5eda17-9fb5-4dcc-b16a-dddf126fa50e\" (UID: \"cc5eda17-9fb5-4dcc-b16a-dddf126fa50e\") " Dec 13 03:31:58 crc kubenswrapper[5070]: I1213 03:31:58.614178 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cc5eda17-9fb5-4dcc-b16a-dddf126fa50e-combined-ca-bundle\") pod \"cc5eda17-9fb5-4dcc-b16a-dddf126fa50e\" (UID: \"cc5eda17-9fb5-4dcc-b16a-dddf126fa50e\") " Dec 13 03:31:58 crc kubenswrapper[5070]: I1213 03:31:58.614261 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cc5eda17-9fb5-4dcc-b16a-dddf126fa50e-config-data\") pod \"cc5eda17-9fb5-4dcc-b16a-dddf126fa50e\" (UID: \"cc5eda17-9fb5-4dcc-b16a-dddf126fa50e\") " Dec 13 03:31:58 crc kubenswrapper[5070]: I1213 03:31:58.614313 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cc5eda17-9fb5-4dcc-b16a-dddf126fa50e-logs\") pod \"cc5eda17-9fb5-4dcc-b16a-dddf126fa50e\" (UID: \"cc5eda17-9fb5-4dcc-b16a-dddf126fa50e\") " Dec 13 03:31:58 crc kubenswrapper[5070]: I1213 03:31:58.617781 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cc5eda17-9fb5-4dcc-b16a-dddf126fa50e-logs" (OuterVolumeSpecName: "logs") pod "cc5eda17-9fb5-4dcc-b16a-dddf126fa50e" (UID: "cc5eda17-9fb5-4dcc-b16a-dddf126fa50e"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 03:31:58 crc kubenswrapper[5070]: I1213 03:31:58.622210 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cc5eda17-9fb5-4dcc-b16a-dddf126fa50e-kube-api-access-7cwcc" (OuterVolumeSpecName: "kube-api-access-7cwcc") pod "cc5eda17-9fb5-4dcc-b16a-dddf126fa50e" (UID: "cc5eda17-9fb5-4dcc-b16a-dddf126fa50e"). InnerVolumeSpecName "kube-api-access-7cwcc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:31:58 crc kubenswrapper[5070]: I1213 03:31:58.640684 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cc5eda17-9fb5-4dcc-b16a-dddf126fa50e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "cc5eda17-9fb5-4dcc-b16a-dddf126fa50e" (UID: "cc5eda17-9fb5-4dcc-b16a-dddf126fa50e"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:31:58 crc kubenswrapper[5070]: I1213 03:31:58.657124 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cc5eda17-9fb5-4dcc-b16a-dddf126fa50e-config-data" (OuterVolumeSpecName: "config-data") pod "cc5eda17-9fb5-4dcc-b16a-dddf126fa50e" (UID: "cc5eda17-9fb5-4dcc-b16a-dddf126fa50e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:31:58 crc kubenswrapper[5070]: I1213 03:31:58.715900 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7cwcc\" (UniqueName: \"kubernetes.io/projected/cc5eda17-9fb5-4dcc-b16a-dddf126fa50e-kube-api-access-7cwcc\") on node \"crc\" DevicePath \"\"" Dec 13 03:31:58 crc kubenswrapper[5070]: I1213 03:31:58.715932 5070 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cc5eda17-9fb5-4dcc-b16a-dddf126fa50e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 03:31:58 crc kubenswrapper[5070]: I1213 03:31:58.715942 5070 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cc5eda17-9fb5-4dcc-b16a-dddf126fa50e-config-data\") on node \"crc\" DevicePath \"\"" Dec 13 03:31:58 crc kubenswrapper[5070]: I1213 03:31:58.715950 5070 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cc5eda17-9fb5-4dcc-b16a-dddf126fa50e-logs\") on node \"crc\" DevicePath \"\"" Dec 13 03:31:58 crc kubenswrapper[5070]: I1213 03:31:58.876910 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 13 03:31:58 crc kubenswrapper[5070]: I1213 03:31:58.877385 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 13 03:31:58 crc kubenswrapper[5070]: I1213 03:31:58.899810 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 13 03:31:58 crc kubenswrapper[5070]: I1213 03:31:58.912831 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Dec 13 03:31:58 crc kubenswrapper[5070]: I1213 03:31:58.922515 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 13 03:31:58 crc kubenswrapper[5070]: E1213 03:31:58.922913 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cc5eda17-9fb5-4dcc-b16a-dddf126fa50e" containerName="nova-api-log" Dec 13 03:31:58 crc kubenswrapper[5070]: I1213 03:31:58.922936 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="cc5eda17-9fb5-4dcc-b16a-dddf126fa50e" containerName="nova-api-log" Dec 13 03:31:58 crc kubenswrapper[5070]: E1213 03:31:58.922957 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cc5eda17-9fb5-4dcc-b16a-dddf126fa50e" containerName="nova-api-api" Dec 13 03:31:58 crc kubenswrapper[5070]: I1213 03:31:58.922966 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="cc5eda17-9fb5-4dcc-b16a-dddf126fa50e" containerName="nova-api-api" Dec 13 03:31:58 crc kubenswrapper[5070]: I1213 03:31:58.923154 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="cc5eda17-9fb5-4dcc-b16a-dddf126fa50e" containerName="nova-api-api" Dec 13 03:31:58 crc kubenswrapper[5070]: I1213 03:31:58.923175 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="cc5eda17-9fb5-4dcc-b16a-dddf126fa50e" containerName="nova-api-log" Dec 13 03:31:58 crc kubenswrapper[5070]: I1213 03:31:58.924154 5070 
util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 13 03:31:58 crc kubenswrapper[5070]: I1213 03:31:58.928376 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 13 03:31:58 crc kubenswrapper[5070]: I1213 03:31:58.933782 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 13 03:31:59 crc kubenswrapper[5070]: I1213 03:31:59.023686 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/05c5be97-d287-43b8-99b0-a1d330f4d2d2-logs\") pod \"nova-api-0\" (UID: \"05c5be97-d287-43b8-99b0-a1d330f4d2d2\") " pod="openstack/nova-api-0" Dec 13 03:31:59 crc kubenswrapper[5070]: I1213 03:31:59.023763 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h8m8g\" (UniqueName: \"kubernetes.io/projected/05c5be97-d287-43b8-99b0-a1d330f4d2d2-kube-api-access-h8m8g\") pod \"nova-api-0\" (UID: \"05c5be97-d287-43b8-99b0-a1d330f4d2d2\") " pod="openstack/nova-api-0" Dec 13 03:31:59 crc kubenswrapper[5070]: I1213 03:31:59.023804 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05c5be97-d287-43b8-99b0-a1d330f4d2d2-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"05c5be97-d287-43b8-99b0-a1d330f4d2d2\") " pod="openstack/nova-api-0" Dec 13 03:31:59 crc kubenswrapper[5070]: I1213 03:31:59.023836 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/05c5be97-d287-43b8-99b0-a1d330f4d2d2-config-data\") pod \"nova-api-0\" (UID: \"05c5be97-d287-43b8-99b0-a1d330f4d2d2\") " pod="openstack/nova-api-0" Dec 13 03:31:59 crc kubenswrapper[5070]: I1213 03:31:59.125650 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/05c5be97-d287-43b8-99b0-a1d330f4d2d2-logs\") pod \"nova-api-0\" (UID: \"05c5be97-d287-43b8-99b0-a1d330f4d2d2\") " pod="openstack/nova-api-0" Dec 13 03:31:59 crc kubenswrapper[5070]: I1213 03:31:59.125898 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h8m8g\" (UniqueName: \"kubernetes.io/projected/05c5be97-d287-43b8-99b0-a1d330f4d2d2-kube-api-access-h8m8g\") pod \"nova-api-0\" (UID: \"05c5be97-d287-43b8-99b0-a1d330f4d2d2\") " pod="openstack/nova-api-0" Dec 13 03:31:59 crc kubenswrapper[5070]: I1213 03:31:59.126024 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05c5be97-d287-43b8-99b0-a1d330f4d2d2-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"05c5be97-d287-43b8-99b0-a1d330f4d2d2\") " pod="openstack/nova-api-0" Dec 13 03:31:59 crc kubenswrapper[5070]: I1213 03:31:59.126135 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/05c5be97-d287-43b8-99b0-a1d330f4d2d2-config-data\") pod \"nova-api-0\" (UID: \"05c5be97-d287-43b8-99b0-a1d330f4d2d2\") " pod="openstack/nova-api-0" Dec 13 03:31:59 crc kubenswrapper[5070]: I1213 03:31:59.126078 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/05c5be97-d287-43b8-99b0-a1d330f4d2d2-logs\") pod \"nova-api-0\" (UID: 
\"05c5be97-d287-43b8-99b0-a1d330f4d2d2\") " pod="openstack/nova-api-0" Dec 13 03:31:59 crc kubenswrapper[5070]: I1213 03:31:59.129744 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/05c5be97-d287-43b8-99b0-a1d330f4d2d2-config-data\") pod \"nova-api-0\" (UID: \"05c5be97-d287-43b8-99b0-a1d330f4d2d2\") " pod="openstack/nova-api-0" Dec 13 03:31:59 crc kubenswrapper[5070]: I1213 03:31:59.129885 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05c5be97-d287-43b8-99b0-a1d330f4d2d2-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"05c5be97-d287-43b8-99b0-a1d330f4d2d2\") " pod="openstack/nova-api-0" Dec 13 03:31:59 crc kubenswrapper[5070]: I1213 03:31:59.230604 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h8m8g\" (UniqueName: \"kubernetes.io/projected/05c5be97-d287-43b8-99b0-a1d330f4d2d2-kube-api-access-h8m8g\") pod \"nova-api-0\" (UID: \"05c5be97-d287-43b8-99b0-a1d330f4d2d2\") " pod="openstack/nova-api-0" Dec 13 03:31:59 crc kubenswrapper[5070]: I1213 03:31:59.246727 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 13 03:31:59 crc kubenswrapper[5070]: I1213 03:31:59.533433 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"100c82c7-ad01-4aa0-82bd-ddca864ebae3","Type":"ContainerStarted","Data":"770730c75c8f3aced2aa1c444c44952ccaf658492d76d7eea1b84ff4bfb72065"} Dec 13 03:31:59 crc kubenswrapper[5070]: I1213 03:31:59.734089 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 13 03:32:00 crc kubenswrapper[5070]: I1213 03:32:00.179364 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cc5eda17-9fb5-4dcc-b16a-dddf126fa50e" path="/var/lib/kubelet/pods/cc5eda17-9fb5-4dcc-b16a-dddf126fa50e/volumes" Dec 13 03:32:00 crc kubenswrapper[5070]: I1213 03:32:00.545647 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"05c5be97-d287-43b8-99b0-a1d330f4d2d2","Type":"ContainerStarted","Data":"18643b2d115a40fecebefc35602a59df31479c988d98d395b4e174ab20171443"} Dec 13 03:32:00 crc kubenswrapper[5070]: I1213 03:32:00.546026 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"05c5be97-d287-43b8-99b0-a1d330f4d2d2","Type":"ContainerStarted","Data":"f50c4f5527bd94edcefe4a6c30b852d770a27f6dbba4fcc6afe9e9aab3db31d1"} Dec 13 03:32:00 crc kubenswrapper[5070]: I1213 03:32:00.546047 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"05c5be97-d287-43b8-99b0-a1d330f4d2d2","Type":"ContainerStarted","Data":"1ab1df9fd0db782f9a7ec0d17fcd1a49d1d85e4af7715565eb4450f484012bdb"} Dec 13 03:32:00 crc kubenswrapper[5070]: I1213 03:32:00.552500 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"100c82c7-ad01-4aa0-82bd-ddca864ebae3","Type":"ContainerStarted","Data":"bdc24c6846bca7892f55714b6b8d166853a75b3ad9ebf1c10ce7e313826e4fc7"} Dec 13 03:32:00 crc kubenswrapper[5070]: I1213 03:32:00.583334 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.583315998 podStartE2EDuration="2.583315998s" podCreationTimestamp="2025-12-13 03:31:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" 
observedRunningTime="2025-12-13 03:32:00.572298687 +0000 UTC m=+1212.808142253" watchObservedRunningTime="2025-12-13 03:32:00.583315998 +0000 UTC m=+1212.819159554" Dec 13 03:32:00 crc kubenswrapper[5070]: I1213 03:32:00.939593 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Dec 13 03:32:01 crc kubenswrapper[5070]: I1213 03:32:01.727390 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Dec 13 03:32:01 crc kubenswrapper[5070]: E1213 03:32:01.994046 5070 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1d71c72d_4599_48e7_8c31_63f6968aacb2.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1d71c72d_4599_48e7_8c31_63f6968aacb2.slice/crio-a86ed279fdad21653609fc09f712450947d9082e3611c9069cd21b6241a1fa88\": RecentStats: unable to find data in memory cache]" Dec 13 03:32:02 crc kubenswrapper[5070]: I1213 03:32:02.578251 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"100c82c7-ad01-4aa0-82bd-ddca864ebae3","Type":"ContainerStarted","Data":"bcad05573cc2318977a4c9e042ada148a6df05fb9d0290d6efa62d3b7e2ff053"} Dec 13 03:32:02 crc kubenswrapper[5070]: I1213 03:32:02.579093 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 13 03:32:02 crc kubenswrapper[5070]: I1213 03:32:02.599259 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.485065443 podStartE2EDuration="6.599238385s" podCreationTimestamp="2025-12-13 03:31:56 +0000 UTC" firstStartedPulling="2025-12-13 03:31:57.32197029 +0000 UTC m=+1209.557813836" lastFinishedPulling="2025-12-13 03:32:01.436143232 +0000 UTC m=+1213.671986778" observedRunningTime="2025-12-13 03:32:02.595891043 +0000 UTC m=+1214.831734599" watchObservedRunningTime="2025-12-13 03:32:02.599238385 +0000 UTC m=+1214.835081931" Dec 13 03:32:03 crc kubenswrapper[5070]: I1213 03:32:03.862722 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 13 03:32:03 crc kubenswrapper[5070]: I1213 03:32:03.865059 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 13 03:32:04 crc kubenswrapper[5070]: I1213 03:32:04.878613 5070 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="c574c806-7eda-4898-a3d7-92e5a80f9950" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.176:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 13 03:32:04 crc kubenswrapper[5070]: I1213 03:32:04.878620 5070 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="c574c806-7eda-4898-a3d7-92e5a80f9950" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.176:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 13 03:32:05 crc kubenswrapper[5070]: I1213 03:32:05.928565 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0" Dec 13 03:32:05 crc kubenswrapper[5070]: I1213 03:32:05.940241 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" 
pod="openstack/nova-scheduler-0" Dec 13 03:32:05 crc kubenswrapper[5070]: I1213 03:32:05.970101 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Dec 13 03:32:06 crc kubenswrapper[5070]: I1213 03:32:06.664263 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Dec 13 03:32:09 crc kubenswrapper[5070]: I1213 03:32:09.247266 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 13 03:32:09 crc kubenswrapper[5070]: I1213 03:32:09.247603 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 13 03:32:10 crc kubenswrapper[5070]: I1213 03:32:10.329593 5070 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="05c5be97-d287-43b8-99b0-a1d330f4d2d2" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.180:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 13 03:32:10 crc kubenswrapper[5070]: I1213 03:32:10.329649 5070 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="05c5be97-d287-43b8-99b0-a1d330f4d2d2" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.180:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 13 03:32:12 crc kubenswrapper[5070]: E1213 03:32:12.228431 5070 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1d71c72d_4599_48e7_8c31_63f6968aacb2.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1d71c72d_4599_48e7_8c31_63f6968aacb2.slice/crio-a86ed279fdad21653609fc09f712450947d9082e3611c9069cd21b6241a1fa88\": RecentStats: unable to find data in memory cache]" Dec 13 03:32:13 crc kubenswrapper[5070]: I1213 03:32:13.871224 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 13 03:32:13 crc kubenswrapper[5070]: I1213 03:32:13.871615 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 13 03:32:13 crc kubenswrapper[5070]: I1213 03:32:13.880941 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 13 03:32:13 crc kubenswrapper[5070]: I1213 03:32:13.886854 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 13 03:32:17 crc kubenswrapper[5070]: I1213 03:32:17.610211 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 13 03:32:17 crc kubenswrapper[5070]: I1213 03:32:17.735386 5070 generic.go:334] "Generic (PLEG): container finished" podID="e9c685a9-2e74-4815-b092-35d4d1100ecf" containerID="1e4383444ee8d4adcf23f2dc8d02643dda91e192cb924b619ac300649d426065" exitCode=137 Dec 13 03:32:17 crc kubenswrapper[5070]: I1213 03:32:17.735425 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"e9c685a9-2e74-4815-b092-35d4d1100ecf","Type":"ContainerDied","Data":"1e4383444ee8d4adcf23f2dc8d02643dda91e192cb924b619ac300649d426065"} Dec 13 03:32:17 crc kubenswrapper[5070]: I1213 03:32:17.735458 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 13 03:32:17 crc kubenswrapper[5070]: I1213 03:32:17.735495 5070 scope.go:117] "RemoveContainer" containerID="1e4383444ee8d4adcf23f2dc8d02643dda91e192cb924b619ac300649d426065" Dec 13 03:32:17 crc kubenswrapper[5070]: I1213 03:32:17.735464 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"e9c685a9-2e74-4815-b092-35d4d1100ecf","Type":"ContainerDied","Data":"3165380bf8144020aa2d9b08dd971db068827e90592c907143b8b4714a3912a0"} Dec 13 03:32:17 crc kubenswrapper[5070]: I1213 03:32:17.756716 5070 scope.go:117] "RemoveContainer" containerID="1e4383444ee8d4adcf23f2dc8d02643dda91e192cb924b619ac300649d426065" Dec 13 03:32:17 crc kubenswrapper[5070]: E1213 03:32:17.757747 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1e4383444ee8d4adcf23f2dc8d02643dda91e192cb924b619ac300649d426065\": container with ID starting with 1e4383444ee8d4adcf23f2dc8d02643dda91e192cb924b619ac300649d426065 not found: ID does not exist" containerID="1e4383444ee8d4adcf23f2dc8d02643dda91e192cb924b619ac300649d426065" Dec 13 03:32:17 crc kubenswrapper[5070]: I1213 03:32:17.757807 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1e4383444ee8d4adcf23f2dc8d02643dda91e192cb924b619ac300649d426065"} err="failed to get container status \"1e4383444ee8d4adcf23f2dc8d02643dda91e192cb924b619ac300649d426065\": rpc error: code = NotFound desc = could not find container \"1e4383444ee8d4adcf23f2dc8d02643dda91e192cb924b619ac300649d426065\": container with ID starting with 1e4383444ee8d4adcf23f2dc8d02643dda91e192cb924b619ac300649d426065 not found: ID does not exist" Dec 13 03:32:17 crc kubenswrapper[5070]: I1213 03:32:17.768511 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ftj8f\" (UniqueName: \"kubernetes.io/projected/e9c685a9-2e74-4815-b092-35d4d1100ecf-kube-api-access-ftj8f\") pod \"e9c685a9-2e74-4815-b092-35d4d1100ecf\" (UID: \"e9c685a9-2e74-4815-b092-35d4d1100ecf\") " Dec 13 03:32:17 crc kubenswrapper[5070]: I1213 03:32:17.768641 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e9c685a9-2e74-4815-b092-35d4d1100ecf-combined-ca-bundle\") pod \"e9c685a9-2e74-4815-b092-35d4d1100ecf\" (UID: \"e9c685a9-2e74-4815-b092-35d4d1100ecf\") " Dec 13 03:32:17 crc kubenswrapper[5070]: I1213 03:32:17.768676 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e9c685a9-2e74-4815-b092-35d4d1100ecf-config-data\") pod \"e9c685a9-2e74-4815-b092-35d4d1100ecf\" (UID: \"e9c685a9-2e74-4815-b092-35d4d1100ecf\") " Dec 13 03:32:17 crc kubenswrapper[5070]: I1213 03:32:17.774353 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e9c685a9-2e74-4815-b092-35d4d1100ecf-kube-api-access-ftj8f" (OuterVolumeSpecName: "kube-api-access-ftj8f") pod "e9c685a9-2e74-4815-b092-35d4d1100ecf" (UID: "e9c685a9-2e74-4815-b092-35d4d1100ecf"). InnerVolumeSpecName "kube-api-access-ftj8f". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:32:17 crc kubenswrapper[5070]: I1213 03:32:17.795403 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e9c685a9-2e74-4815-b092-35d4d1100ecf-config-data" (OuterVolumeSpecName: "config-data") pod "e9c685a9-2e74-4815-b092-35d4d1100ecf" (UID: "e9c685a9-2e74-4815-b092-35d4d1100ecf"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:32:17 crc kubenswrapper[5070]: I1213 03:32:17.797293 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e9c685a9-2e74-4815-b092-35d4d1100ecf-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e9c685a9-2e74-4815-b092-35d4d1100ecf" (UID: "e9c685a9-2e74-4815-b092-35d4d1100ecf"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:32:17 crc kubenswrapper[5070]: I1213 03:32:17.871800 5070 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e9c685a9-2e74-4815-b092-35d4d1100ecf-config-data\") on node \"crc\" DevicePath \"\"" Dec 13 03:32:17 crc kubenswrapper[5070]: I1213 03:32:17.871892 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ftj8f\" (UniqueName: \"kubernetes.io/projected/e9c685a9-2e74-4815-b092-35d4d1100ecf-kube-api-access-ftj8f\") on node \"crc\" DevicePath \"\"" Dec 13 03:32:17 crc kubenswrapper[5070]: I1213 03:32:17.872201 5070 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e9c685a9-2e74-4815-b092-35d4d1100ecf-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 03:32:18 crc kubenswrapper[5070]: I1213 03:32:18.068926 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 13 03:32:18 crc kubenswrapper[5070]: I1213 03:32:18.086894 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 13 03:32:18 crc kubenswrapper[5070]: I1213 03:32:18.096819 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 13 03:32:18 crc kubenswrapper[5070]: E1213 03:32:18.097375 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e9c685a9-2e74-4815-b092-35d4d1100ecf" containerName="nova-cell1-novncproxy-novncproxy" Dec 13 03:32:18 crc kubenswrapper[5070]: I1213 03:32:18.097401 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="e9c685a9-2e74-4815-b092-35d4d1100ecf" containerName="nova-cell1-novncproxy-novncproxy" Dec 13 03:32:18 crc kubenswrapper[5070]: I1213 03:32:18.097664 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="e9c685a9-2e74-4815-b092-35d4d1100ecf" containerName="nova-cell1-novncproxy-novncproxy" Dec 13 03:32:18 crc kubenswrapper[5070]: I1213 03:32:18.098536 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 13 03:32:18 crc kubenswrapper[5070]: I1213 03:32:18.100719 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Dec 13 03:32:18 crc kubenswrapper[5070]: I1213 03:32:18.102689 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-vencrypt" Dec 13 03:32:18 crc kubenswrapper[5070]: I1213 03:32:18.106118 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-public-svc" Dec 13 03:32:18 crc kubenswrapper[5070]: I1213 03:32:18.120382 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 13 03:32:18 crc kubenswrapper[5070]: I1213 03:32:18.177070 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/cfd0162b-8004-4817-8f85-efd5c493e3c0-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"cfd0162b-8004-4817-8f85-efd5c493e3c0\") " pod="openstack/nova-cell1-novncproxy-0" Dec 13 03:32:18 crc kubenswrapper[5070]: I1213 03:32:18.177140 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cfd0162b-8004-4817-8f85-efd5c493e3c0-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"cfd0162b-8004-4817-8f85-efd5c493e3c0\") " pod="openstack/nova-cell1-novncproxy-0" Dec 13 03:32:18 crc kubenswrapper[5070]: I1213 03:32:18.177214 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cfd0162b-8004-4817-8f85-efd5c493e3c0-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"cfd0162b-8004-4817-8f85-efd5c493e3c0\") " pod="openstack/nova-cell1-novncproxy-0" Dec 13 03:32:18 crc kubenswrapper[5070]: I1213 03:32:18.177246 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/cfd0162b-8004-4817-8f85-efd5c493e3c0-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"cfd0162b-8004-4817-8f85-efd5c493e3c0\") " pod="openstack/nova-cell1-novncproxy-0" Dec 13 03:32:18 crc kubenswrapper[5070]: I1213 03:32:18.177310 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qfbn7\" (UniqueName: \"kubernetes.io/projected/cfd0162b-8004-4817-8f85-efd5c493e3c0-kube-api-access-qfbn7\") pod \"nova-cell1-novncproxy-0\" (UID: \"cfd0162b-8004-4817-8f85-efd5c493e3c0\") " pod="openstack/nova-cell1-novncproxy-0" Dec 13 03:32:18 crc kubenswrapper[5070]: I1213 03:32:18.178730 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e9c685a9-2e74-4815-b092-35d4d1100ecf" path="/var/lib/kubelet/pods/e9c685a9-2e74-4815-b092-35d4d1100ecf/volumes" Dec 13 03:32:18 crc kubenswrapper[5070]: I1213 03:32:18.278805 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/cfd0162b-8004-4817-8f85-efd5c493e3c0-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"cfd0162b-8004-4817-8f85-efd5c493e3c0\") " pod="openstack/nova-cell1-novncproxy-0" Dec 13 03:32:18 crc kubenswrapper[5070]: I1213 03:32:18.278888 5070 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cfd0162b-8004-4817-8f85-efd5c493e3c0-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"cfd0162b-8004-4817-8f85-efd5c493e3c0\") " pod="openstack/nova-cell1-novncproxy-0" Dec 13 03:32:18 crc kubenswrapper[5070]: I1213 03:32:18.278974 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cfd0162b-8004-4817-8f85-efd5c493e3c0-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"cfd0162b-8004-4817-8f85-efd5c493e3c0\") " pod="openstack/nova-cell1-novncproxy-0" Dec 13 03:32:18 crc kubenswrapper[5070]: I1213 03:32:18.279005 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/cfd0162b-8004-4817-8f85-efd5c493e3c0-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"cfd0162b-8004-4817-8f85-efd5c493e3c0\") " pod="openstack/nova-cell1-novncproxy-0" Dec 13 03:32:18 crc kubenswrapper[5070]: I1213 03:32:18.279077 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qfbn7\" (UniqueName: \"kubernetes.io/projected/cfd0162b-8004-4817-8f85-efd5c493e3c0-kube-api-access-qfbn7\") pod \"nova-cell1-novncproxy-0\" (UID: \"cfd0162b-8004-4817-8f85-efd5c493e3c0\") " pod="openstack/nova-cell1-novncproxy-0" Dec 13 03:32:18 crc kubenswrapper[5070]: I1213 03:32:18.282306 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/cfd0162b-8004-4817-8f85-efd5c493e3c0-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"cfd0162b-8004-4817-8f85-efd5c493e3c0\") " pod="openstack/nova-cell1-novncproxy-0" Dec 13 03:32:18 crc kubenswrapper[5070]: I1213 03:32:18.282416 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cfd0162b-8004-4817-8f85-efd5c493e3c0-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"cfd0162b-8004-4817-8f85-efd5c493e3c0\") " pod="openstack/nova-cell1-novncproxy-0" Dec 13 03:32:18 crc kubenswrapper[5070]: I1213 03:32:18.283756 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cfd0162b-8004-4817-8f85-efd5c493e3c0-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"cfd0162b-8004-4817-8f85-efd5c493e3c0\") " pod="openstack/nova-cell1-novncproxy-0" Dec 13 03:32:18 crc kubenswrapper[5070]: I1213 03:32:18.287153 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/cfd0162b-8004-4817-8f85-efd5c493e3c0-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"cfd0162b-8004-4817-8f85-efd5c493e3c0\") " pod="openstack/nova-cell1-novncproxy-0" Dec 13 03:32:18 crc kubenswrapper[5070]: I1213 03:32:18.297158 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qfbn7\" (UniqueName: \"kubernetes.io/projected/cfd0162b-8004-4817-8f85-efd5c493e3c0-kube-api-access-qfbn7\") pod \"nova-cell1-novncproxy-0\" (UID: \"cfd0162b-8004-4817-8f85-efd5c493e3c0\") " pod="openstack/nova-cell1-novncproxy-0" Dec 13 03:32:18 crc kubenswrapper[5070]: I1213 03:32:18.416616 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 13 03:32:18 crc kubenswrapper[5070]: I1213 03:32:18.898666 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 13 03:32:18 crc kubenswrapper[5070]: W1213 03:32:18.900664 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcfd0162b_8004_4817_8f85_efd5c493e3c0.slice/crio-6dc8613744d4996a33c988f136c3dc88d1d04a85e896a43b462d94a88a0166ff WatchSource:0}: Error finding container 6dc8613744d4996a33c988f136c3dc88d1d04a85e896a43b462d94a88a0166ff: Status 404 returned error can't find the container with id 6dc8613744d4996a33c988f136c3dc88d1d04a85e896a43b462d94a88a0166ff Dec 13 03:32:19 crc kubenswrapper[5070]: I1213 03:32:19.255784 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 13 03:32:19 crc kubenswrapper[5070]: I1213 03:32:19.256475 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 13 03:32:19 crc kubenswrapper[5070]: I1213 03:32:19.260764 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 13 03:32:19 crc kubenswrapper[5070]: I1213 03:32:19.267744 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 13 03:32:19 crc kubenswrapper[5070]: I1213 03:32:19.759026 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"cfd0162b-8004-4817-8f85-efd5c493e3c0","Type":"ContainerStarted","Data":"d19f4346fd4a140cb255e8a23bb53b4b3d3782d92a7a9584d36e2ac422739ade"} Dec 13 03:32:19 crc kubenswrapper[5070]: I1213 03:32:19.759377 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"cfd0162b-8004-4817-8f85-efd5c493e3c0","Type":"ContainerStarted","Data":"6dc8613744d4996a33c988f136c3dc88d1d04a85e896a43b462d94a88a0166ff"} Dec 13 03:32:19 crc kubenswrapper[5070]: I1213 03:32:19.759395 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 13 03:32:19 crc kubenswrapper[5070]: I1213 03:32:19.793377 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=1.7933632290000001 podStartE2EDuration="1.793363229s" podCreationTimestamp="2025-12-13 03:32:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 03:32:19.78571829 +0000 UTC m=+1232.021561836" watchObservedRunningTime="2025-12-13 03:32:19.793363229 +0000 UTC m=+1232.029206775" Dec 13 03:32:19 crc kubenswrapper[5070]: I1213 03:32:19.834778 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 13 03:32:20 crc kubenswrapper[5070]: I1213 03:32:20.019852 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-68d4b6d797-5qpfb"] Dec 13 03:32:20 crc kubenswrapper[5070]: I1213 03:32:20.021850 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-68d4b6d797-5qpfb" Dec 13 03:32:20 crc kubenswrapper[5070]: I1213 03:32:20.066945 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-68d4b6d797-5qpfb"] Dec 13 03:32:20 crc kubenswrapper[5070]: I1213 03:32:20.113274 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c0887592-9adc-49d0-b351-f3d22bfb2ca2-config\") pod \"dnsmasq-dns-68d4b6d797-5qpfb\" (UID: \"c0887592-9adc-49d0-b351-f3d22bfb2ca2\") " pod="openstack/dnsmasq-dns-68d4b6d797-5qpfb" Dec 13 03:32:20 crc kubenswrapper[5070]: I1213 03:32:20.113329 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c0887592-9adc-49d0-b351-f3d22bfb2ca2-dns-svc\") pod \"dnsmasq-dns-68d4b6d797-5qpfb\" (UID: \"c0887592-9adc-49d0-b351-f3d22bfb2ca2\") " pod="openstack/dnsmasq-dns-68d4b6d797-5qpfb" Dec 13 03:32:20 crc kubenswrapper[5070]: I1213 03:32:20.113382 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f9qvw\" (UniqueName: \"kubernetes.io/projected/c0887592-9adc-49d0-b351-f3d22bfb2ca2-kube-api-access-f9qvw\") pod \"dnsmasq-dns-68d4b6d797-5qpfb\" (UID: \"c0887592-9adc-49d0-b351-f3d22bfb2ca2\") " pod="openstack/dnsmasq-dns-68d4b6d797-5qpfb" Dec 13 03:32:20 crc kubenswrapper[5070]: I1213 03:32:20.113598 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c0887592-9adc-49d0-b351-f3d22bfb2ca2-ovsdbserver-nb\") pod \"dnsmasq-dns-68d4b6d797-5qpfb\" (UID: \"c0887592-9adc-49d0-b351-f3d22bfb2ca2\") " pod="openstack/dnsmasq-dns-68d4b6d797-5qpfb" Dec 13 03:32:20 crc kubenswrapper[5070]: I1213 03:32:20.113665 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c0887592-9adc-49d0-b351-f3d22bfb2ca2-ovsdbserver-sb\") pod \"dnsmasq-dns-68d4b6d797-5qpfb\" (UID: \"c0887592-9adc-49d0-b351-f3d22bfb2ca2\") " pod="openstack/dnsmasq-dns-68d4b6d797-5qpfb" Dec 13 03:32:20 crc kubenswrapper[5070]: I1213 03:32:20.217745 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c0887592-9adc-49d0-b351-f3d22bfb2ca2-config\") pod \"dnsmasq-dns-68d4b6d797-5qpfb\" (UID: \"c0887592-9adc-49d0-b351-f3d22bfb2ca2\") " pod="openstack/dnsmasq-dns-68d4b6d797-5qpfb" Dec 13 03:32:20 crc kubenswrapper[5070]: I1213 03:32:20.217786 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c0887592-9adc-49d0-b351-f3d22bfb2ca2-dns-svc\") pod \"dnsmasq-dns-68d4b6d797-5qpfb\" (UID: \"c0887592-9adc-49d0-b351-f3d22bfb2ca2\") " pod="openstack/dnsmasq-dns-68d4b6d797-5qpfb" Dec 13 03:32:20 crc kubenswrapper[5070]: I1213 03:32:20.217820 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f9qvw\" (UniqueName: \"kubernetes.io/projected/c0887592-9adc-49d0-b351-f3d22bfb2ca2-kube-api-access-f9qvw\") pod \"dnsmasq-dns-68d4b6d797-5qpfb\" (UID: \"c0887592-9adc-49d0-b351-f3d22bfb2ca2\") " pod="openstack/dnsmasq-dns-68d4b6d797-5qpfb" Dec 13 03:32:20 crc kubenswrapper[5070]: I1213 03:32:20.218056 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c0887592-9adc-49d0-b351-f3d22bfb2ca2-ovsdbserver-nb\") pod \"dnsmasq-dns-68d4b6d797-5qpfb\" (UID: \"c0887592-9adc-49d0-b351-f3d22bfb2ca2\") " pod="openstack/dnsmasq-dns-68d4b6d797-5qpfb" Dec 13 03:32:20 crc kubenswrapper[5070]: I1213 03:32:20.218082 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c0887592-9adc-49d0-b351-f3d22bfb2ca2-ovsdbserver-sb\") pod \"dnsmasq-dns-68d4b6d797-5qpfb\" (UID: \"c0887592-9adc-49d0-b351-f3d22bfb2ca2\") " pod="openstack/dnsmasq-dns-68d4b6d797-5qpfb" Dec 13 03:32:20 crc kubenswrapper[5070]: I1213 03:32:20.219167 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c0887592-9adc-49d0-b351-f3d22bfb2ca2-ovsdbserver-sb\") pod \"dnsmasq-dns-68d4b6d797-5qpfb\" (UID: \"c0887592-9adc-49d0-b351-f3d22bfb2ca2\") " pod="openstack/dnsmasq-dns-68d4b6d797-5qpfb" Dec 13 03:32:20 crc kubenswrapper[5070]: I1213 03:32:20.219164 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c0887592-9adc-49d0-b351-f3d22bfb2ca2-dns-svc\") pod \"dnsmasq-dns-68d4b6d797-5qpfb\" (UID: \"c0887592-9adc-49d0-b351-f3d22bfb2ca2\") " pod="openstack/dnsmasq-dns-68d4b6d797-5qpfb" Dec 13 03:32:20 crc kubenswrapper[5070]: I1213 03:32:20.219233 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c0887592-9adc-49d0-b351-f3d22bfb2ca2-ovsdbserver-nb\") pod \"dnsmasq-dns-68d4b6d797-5qpfb\" (UID: \"c0887592-9adc-49d0-b351-f3d22bfb2ca2\") " pod="openstack/dnsmasq-dns-68d4b6d797-5qpfb" Dec 13 03:32:20 crc kubenswrapper[5070]: I1213 03:32:20.219285 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c0887592-9adc-49d0-b351-f3d22bfb2ca2-config\") pod \"dnsmasq-dns-68d4b6d797-5qpfb\" (UID: \"c0887592-9adc-49d0-b351-f3d22bfb2ca2\") " pod="openstack/dnsmasq-dns-68d4b6d797-5qpfb" Dec 13 03:32:20 crc kubenswrapper[5070]: I1213 03:32:20.251277 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f9qvw\" (UniqueName: \"kubernetes.io/projected/c0887592-9adc-49d0-b351-f3d22bfb2ca2-kube-api-access-f9qvw\") pod \"dnsmasq-dns-68d4b6d797-5qpfb\" (UID: \"c0887592-9adc-49d0-b351-f3d22bfb2ca2\") " pod="openstack/dnsmasq-dns-68d4b6d797-5qpfb" Dec 13 03:32:20 crc kubenswrapper[5070]: I1213 03:32:20.368714 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-68d4b6d797-5qpfb" Dec 13 03:32:20 crc kubenswrapper[5070]: I1213 03:32:20.854736 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-68d4b6d797-5qpfb"] Dec 13 03:32:21 crc kubenswrapper[5070]: I1213 03:32:21.781587 5070 generic.go:334] "Generic (PLEG): container finished" podID="c0887592-9adc-49d0-b351-f3d22bfb2ca2" containerID="c3eab0715f1879257c2f8ccc7cd2c354a4fc7a7738a67ce251e816ad755e5b76" exitCode=0 Dec 13 03:32:21 crc kubenswrapper[5070]: I1213 03:32:21.781793 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-68d4b6d797-5qpfb" event={"ID":"c0887592-9adc-49d0-b351-f3d22bfb2ca2","Type":"ContainerDied","Data":"c3eab0715f1879257c2f8ccc7cd2c354a4fc7a7738a67ce251e816ad755e5b76"} Dec 13 03:32:21 crc kubenswrapper[5070]: I1213 03:32:21.781948 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-68d4b6d797-5qpfb" event={"ID":"c0887592-9adc-49d0-b351-f3d22bfb2ca2","Type":"ContainerStarted","Data":"004a2d6f4dd77f7ec3e3162e501d42c8b6b1be8f3cd8fa622449cb2a00b581be"} Dec 13 03:32:21 crc kubenswrapper[5070]: I1213 03:32:21.945515 5070 patch_prober.go:28] interesting pod/machine-config-daemon-9l4rb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 13 03:32:21 crc kubenswrapper[5070]: I1213 03:32:21.945877 5070 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 13 03:32:22 crc kubenswrapper[5070]: I1213 03:32:22.362722 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 13 03:32:22 crc kubenswrapper[5070]: I1213 03:32:22.363004 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="100c82c7-ad01-4aa0-82bd-ddca864ebae3" containerName="ceilometer-central-agent" containerID="cri-o://8947da473a129a723485267e60fb8a98ec1dcb2919f0714de331a865321259dd" gracePeriod=30 Dec 13 03:32:22 crc kubenswrapper[5070]: I1213 03:32:22.363047 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="100c82c7-ad01-4aa0-82bd-ddca864ebae3" containerName="proxy-httpd" containerID="cri-o://bcad05573cc2318977a4c9e042ada148a6df05fb9d0290d6efa62d3b7e2ff053" gracePeriod=30 Dec 13 03:32:22 crc kubenswrapper[5070]: I1213 03:32:22.363125 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="100c82c7-ad01-4aa0-82bd-ddca864ebae3" containerName="sg-core" containerID="cri-o://bdc24c6846bca7892f55714b6b8d166853a75b3ad9ebf1c10ce7e313826e4fc7" gracePeriod=30 Dec 13 03:32:22 crc kubenswrapper[5070]: I1213 03:32:22.363166 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="100c82c7-ad01-4aa0-82bd-ddca864ebae3" containerName="ceilometer-notification-agent" containerID="cri-o://770730c75c8f3aced2aa1c444c44952ccaf658492d76d7eea1b84ff4bfb72065" gracePeriod=30 Dec 13 03:32:22 crc kubenswrapper[5070]: I1213 03:32:22.380647 5070 prober.go:107] "Probe failed" probeType="Readiness" 
pod="openstack/ceilometer-0" podUID="100c82c7-ad01-4aa0-82bd-ddca864ebae3" containerName="proxy-httpd" probeResult="failure" output="HTTP probe failed with statuscode: 503" Dec 13 03:32:22 crc kubenswrapper[5070]: E1213 03:32:22.502525 5070 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1d71c72d_4599_48e7_8c31_63f6968aacb2.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod100c82c7_ad01_4aa0_82bd_ddca864ebae3.slice/crio-bdc24c6846bca7892f55714b6b8d166853a75b3ad9ebf1c10ce7e313826e4fc7.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1d71c72d_4599_48e7_8c31_63f6968aacb2.slice/crio-a86ed279fdad21653609fc09f712450947d9082e3611c9069cd21b6241a1fa88\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod100c82c7_ad01_4aa0_82bd_ddca864ebae3.slice/crio-conmon-bdc24c6846bca7892f55714b6b8d166853a75b3ad9ebf1c10ce7e313826e4fc7.scope\": RecentStats: unable to find data in memory cache]" Dec 13 03:32:22 crc kubenswrapper[5070]: I1213 03:32:22.615489 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 13 03:32:22 crc kubenswrapper[5070]: I1213 03:32:22.792230 5070 generic.go:334] "Generic (PLEG): container finished" podID="100c82c7-ad01-4aa0-82bd-ddca864ebae3" containerID="bcad05573cc2318977a4c9e042ada148a6df05fb9d0290d6efa62d3b7e2ff053" exitCode=0 Dec 13 03:32:22 crc kubenswrapper[5070]: I1213 03:32:22.792283 5070 generic.go:334] "Generic (PLEG): container finished" podID="100c82c7-ad01-4aa0-82bd-ddca864ebae3" containerID="bdc24c6846bca7892f55714b6b8d166853a75b3ad9ebf1c10ce7e313826e4fc7" exitCode=2 Dec 13 03:32:22 crc kubenswrapper[5070]: I1213 03:32:22.792318 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"100c82c7-ad01-4aa0-82bd-ddca864ebae3","Type":"ContainerDied","Data":"bcad05573cc2318977a4c9e042ada148a6df05fb9d0290d6efa62d3b7e2ff053"} Dec 13 03:32:22 crc kubenswrapper[5070]: I1213 03:32:22.792373 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"100c82c7-ad01-4aa0-82bd-ddca864ebae3","Type":"ContainerDied","Data":"bdc24c6846bca7892f55714b6b8d166853a75b3ad9ebf1c10ce7e313826e4fc7"} Dec 13 03:32:22 crc kubenswrapper[5070]: I1213 03:32:22.794593 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-68d4b6d797-5qpfb" event={"ID":"c0887592-9adc-49d0-b351-f3d22bfb2ca2","Type":"ContainerStarted","Data":"cdb0bddb9ef22d3c66c62bc10f6ed1929b3bef20dd6a293155693047b8356e49"} Dec 13 03:32:22 crc kubenswrapper[5070]: I1213 03:32:22.794727 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="05c5be97-d287-43b8-99b0-a1d330f4d2d2" containerName="nova-api-log" containerID="cri-o://f50c4f5527bd94edcefe4a6c30b852d770a27f6dbba4fcc6afe9e9aab3db31d1" gracePeriod=30 Dec 13 03:32:22 crc kubenswrapper[5070]: I1213 03:32:22.794794 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="05c5be97-d287-43b8-99b0-a1d330f4d2d2" containerName="nova-api-api" containerID="cri-o://18643b2d115a40fecebefc35602a59df31479c988d98d395b4e174ab20171443" gracePeriod=30 Dec 13 03:32:22 crc kubenswrapper[5070]: I1213 03:32:22.821402 5070 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-68d4b6d797-5qpfb" podStartSLOduration=3.8213757299999997 podStartE2EDuration="3.82137573s" podCreationTimestamp="2025-12-13 03:32:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 03:32:22.813048781 +0000 UTC m=+1235.048892337" watchObservedRunningTime="2025-12-13 03:32:22.82137573 +0000 UTC m=+1235.057219276" Dec 13 03:32:23 crc kubenswrapper[5070]: I1213 03:32:23.417434 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Dec 13 03:32:23 crc kubenswrapper[5070]: I1213 03:32:23.804676 5070 generic.go:334] "Generic (PLEG): container finished" podID="05c5be97-d287-43b8-99b0-a1d330f4d2d2" containerID="f50c4f5527bd94edcefe4a6c30b852d770a27f6dbba4fcc6afe9e9aab3db31d1" exitCode=143 Dec 13 03:32:23 crc kubenswrapper[5070]: I1213 03:32:23.804741 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"05c5be97-d287-43b8-99b0-a1d330f4d2d2","Type":"ContainerDied","Data":"f50c4f5527bd94edcefe4a6c30b852d770a27f6dbba4fcc6afe9e9aab3db31d1"} Dec 13 03:32:23 crc kubenswrapper[5070]: I1213 03:32:23.808601 5070 generic.go:334] "Generic (PLEG): container finished" podID="100c82c7-ad01-4aa0-82bd-ddca864ebae3" containerID="8947da473a129a723485267e60fb8a98ec1dcb2919f0714de331a865321259dd" exitCode=0 Dec 13 03:32:23 crc kubenswrapper[5070]: I1213 03:32:23.809579 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"100c82c7-ad01-4aa0-82bd-ddca864ebae3","Type":"ContainerDied","Data":"8947da473a129a723485267e60fb8a98ec1dcb2919f0714de331a865321259dd"} Dec 13 03:32:23 crc kubenswrapper[5070]: I1213 03:32:23.809625 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-68d4b6d797-5qpfb" Dec 13 03:32:25 crc kubenswrapper[5070]: I1213 03:32:25.758456 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 13 03:32:25 crc kubenswrapper[5070]: I1213 03:32:25.820240 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t28tv\" (UniqueName: \"kubernetes.io/projected/100c82c7-ad01-4aa0-82bd-ddca864ebae3-kube-api-access-t28tv\") pod \"100c82c7-ad01-4aa0-82bd-ddca864ebae3\" (UID: \"100c82c7-ad01-4aa0-82bd-ddca864ebae3\") " Dec 13 03:32:25 crc kubenswrapper[5070]: I1213 03:32:25.820350 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/100c82c7-ad01-4aa0-82bd-ddca864ebae3-ceilometer-tls-certs\") pod \"100c82c7-ad01-4aa0-82bd-ddca864ebae3\" (UID: \"100c82c7-ad01-4aa0-82bd-ddca864ebae3\") " Dec 13 03:32:25 crc kubenswrapper[5070]: I1213 03:32:25.820545 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/100c82c7-ad01-4aa0-82bd-ddca864ebae3-log-httpd\") pod \"100c82c7-ad01-4aa0-82bd-ddca864ebae3\" (UID: \"100c82c7-ad01-4aa0-82bd-ddca864ebae3\") " Dec 13 03:32:25 crc kubenswrapper[5070]: I1213 03:32:25.820606 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/100c82c7-ad01-4aa0-82bd-ddca864ebae3-run-httpd\") pod \"100c82c7-ad01-4aa0-82bd-ddca864ebae3\" (UID: \"100c82c7-ad01-4aa0-82bd-ddca864ebae3\") " Dec 13 03:32:25 crc kubenswrapper[5070]: I1213 03:32:25.820638 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/100c82c7-ad01-4aa0-82bd-ddca864ebae3-scripts\") pod \"100c82c7-ad01-4aa0-82bd-ddca864ebae3\" (UID: \"100c82c7-ad01-4aa0-82bd-ddca864ebae3\") " Dec 13 03:32:25 crc kubenswrapper[5070]: I1213 03:32:25.820691 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/100c82c7-ad01-4aa0-82bd-ddca864ebae3-combined-ca-bundle\") pod \"100c82c7-ad01-4aa0-82bd-ddca864ebae3\" (UID: \"100c82c7-ad01-4aa0-82bd-ddca864ebae3\") " Dec 13 03:32:25 crc kubenswrapper[5070]: I1213 03:32:25.820813 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/100c82c7-ad01-4aa0-82bd-ddca864ebae3-config-data\") pod \"100c82c7-ad01-4aa0-82bd-ddca864ebae3\" (UID: \"100c82c7-ad01-4aa0-82bd-ddca864ebae3\") " Dec 13 03:32:25 crc kubenswrapper[5070]: I1213 03:32:25.820906 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/100c82c7-ad01-4aa0-82bd-ddca864ebae3-sg-core-conf-yaml\") pod \"100c82c7-ad01-4aa0-82bd-ddca864ebae3\" (UID: \"100c82c7-ad01-4aa0-82bd-ddca864ebae3\") " Dec 13 03:32:25 crc kubenswrapper[5070]: I1213 03:32:25.820950 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/100c82c7-ad01-4aa0-82bd-ddca864ebae3-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "100c82c7-ad01-4aa0-82bd-ddca864ebae3" (UID: "100c82c7-ad01-4aa0-82bd-ddca864ebae3"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 03:32:25 crc kubenswrapper[5070]: I1213 03:32:25.821471 5070 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/100c82c7-ad01-4aa0-82bd-ddca864ebae3-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 13 03:32:25 crc kubenswrapper[5070]: I1213 03:32:25.821597 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/100c82c7-ad01-4aa0-82bd-ddca864ebae3-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "100c82c7-ad01-4aa0-82bd-ddca864ebae3" (UID: "100c82c7-ad01-4aa0-82bd-ddca864ebae3"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 03:32:25 crc kubenswrapper[5070]: I1213 03:32:25.826556 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/100c82c7-ad01-4aa0-82bd-ddca864ebae3-kube-api-access-t28tv" (OuterVolumeSpecName: "kube-api-access-t28tv") pod "100c82c7-ad01-4aa0-82bd-ddca864ebae3" (UID: "100c82c7-ad01-4aa0-82bd-ddca864ebae3"). InnerVolumeSpecName "kube-api-access-t28tv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:32:25 crc kubenswrapper[5070]: I1213 03:32:25.827688 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/100c82c7-ad01-4aa0-82bd-ddca864ebae3-scripts" (OuterVolumeSpecName: "scripts") pod "100c82c7-ad01-4aa0-82bd-ddca864ebae3" (UID: "100c82c7-ad01-4aa0-82bd-ddca864ebae3"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:32:25 crc kubenswrapper[5070]: I1213 03:32:25.840368 5070 generic.go:334] "Generic (PLEG): container finished" podID="100c82c7-ad01-4aa0-82bd-ddca864ebae3" containerID="770730c75c8f3aced2aa1c444c44952ccaf658492d76d7eea1b84ff4bfb72065" exitCode=0 Dec 13 03:32:25 crc kubenswrapper[5070]: I1213 03:32:25.840478 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"100c82c7-ad01-4aa0-82bd-ddca864ebae3","Type":"ContainerDied","Data":"770730c75c8f3aced2aa1c444c44952ccaf658492d76d7eea1b84ff4bfb72065"} Dec 13 03:32:25 crc kubenswrapper[5070]: I1213 03:32:25.840506 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 13 03:32:25 crc kubenswrapper[5070]: I1213 03:32:25.840522 5070 scope.go:117] "RemoveContainer" containerID="bcad05573cc2318977a4c9e042ada148a6df05fb9d0290d6efa62d3b7e2ff053" Dec 13 03:32:25 crc kubenswrapper[5070]: I1213 03:32:25.840509 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"100c82c7-ad01-4aa0-82bd-ddca864ebae3","Type":"ContainerDied","Data":"931622c9a6816c042df664abd46ec225d1574f5edd77533edbfe8ef477692f3a"} Dec 13 03:32:25 crc kubenswrapper[5070]: I1213 03:32:25.853866 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/100c82c7-ad01-4aa0-82bd-ddca864ebae3-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "100c82c7-ad01-4aa0-82bd-ddca864ebae3" (UID: "100c82c7-ad01-4aa0-82bd-ddca864ebae3"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:32:25 crc kubenswrapper[5070]: I1213 03:32:25.876733 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/100c82c7-ad01-4aa0-82bd-ddca864ebae3-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "100c82c7-ad01-4aa0-82bd-ddca864ebae3" (UID: "100c82c7-ad01-4aa0-82bd-ddca864ebae3"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:32:25 crc kubenswrapper[5070]: I1213 03:32:25.919533 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/100c82c7-ad01-4aa0-82bd-ddca864ebae3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "100c82c7-ad01-4aa0-82bd-ddca864ebae3" (UID: "100c82c7-ad01-4aa0-82bd-ddca864ebae3"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:32:25 crc kubenswrapper[5070]: I1213 03:32:25.923908 5070 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/100c82c7-ad01-4aa0-82bd-ddca864ebae3-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 13 03:32:25 crc kubenswrapper[5070]: I1213 03:32:25.923930 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t28tv\" (UniqueName: \"kubernetes.io/projected/100c82c7-ad01-4aa0-82bd-ddca864ebae3-kube-api-access-t28tv\") on node \"crc\" DevicePath \"\"" Dec 13 03:32:25 crc kubenswrapper[5070]: I1213 03:32:25.923939 5070 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/100c82c7-ad01-4aa0-82bd-ddca864ebae3-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 13 03:32:25 crc kubenswrapper[5070]: I1213 03:32:25.923947 5070 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/100c82c7-ad01-4aa0-82bd-ddca864ebae3-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 13 03:32:25 crc kubenswrapper[5070]: I1213 03:32:25.923956 5070 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/100c82c7-ad01-4aa0-82bd-ddca864ebae3-scripts\") on node \"crc\" DevicePath \"\"" Dec 13 03:32:25 crc kubenswrapper[5070]: I1213 03:32:25.923965 5070 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/100c82c7-ad01-4aa0-82bd-ddca864ebae3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 03:32:25 crc kubenswrapper[5070]: I1213 03:32:25.942813 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/100c82c7-ad01-4aa0-82bd-ddca864ebae3-config-data" (OuterVolumeSpecName: "config-data") pod "100c82c7-ad01-4aa0-82bd-ddca864ebae3" (UID: "100c82c7-ad01-4aa0-82bd-ddca864ebae3"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:32:26 crc kubenswrapper[5070]: I1213 03:32:26.025495 5070 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/100c82c7-ad01-4aa0-82bd-ddca864ebae3-config-data\") on node \"crc\" DevicePath \"\"" Dec 13 03:32:26 crc kubenswrapper[5070]: I1213 03:32:26.131340 5070 scope.go:117] "RemoveContainer" containerID="bdc24c6846bca7892f55714b6b8d166853a75b3ad9ebf1c10ce7e313826e4fc7" Dec 13 03:32:26 crc kubenswrapper[5070]: I1213 03:32:26.149807 5070 scope.go:117] "RemoveContainer" containerID="770730c75c8f3aced2aa1c444c44952ccaf658492d76d7eea1b84ff4bfb72065" Dec 13 03:32:26 crc kubenswrapper[5070]: I1213 03:32:26.226549 5070 scope.go:117] "RemoveContainer" containerID="8947da473a129a723485267e60fb8a98ec1dcb2919f0714de331a865321259dd" Dec 13 03:32:26 crc kubenswrapper[5070]: I1213 03:32:26.228393 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 13 03:32:26 crc kubenswrapper[5070]: I1213 03:32:26.238481 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 13 03:32:26 crc kubenswrapper[5070]: I1213 03:32:26.248294 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 13 03:32:26 crc kubenswrapper[5070]: E1213 03:32:26.248774 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="100c82c7-ad01-4aa0-82bd-ddca864ebae3" containerName="ceilometer-notification-agent" Dec 13 03:32:26 crc kubenswrapper[5070]: I1213 03:32:26.248799 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="100c82c7-ad01-4aa0-82bd-ddca864ebae3" containerName="ceilometer-notification-agent" Dec 13 03:32:26 crc kubenswrapper[5070]: E1213 03:32:26.248817 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="100c82c7-ad01-4aa0-82bd-ddca864ebae3" containerName="ceilometer-central-agent" Dec 13 03:32:26 crc kubenswrapper[5070]: I1213 03:32:26.248828 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="100c82c7-ad01-4aa0-82bd-ddca864ebae3" containerName="ceilometer-central-agent" Dec 13 03:32:26 crc kubenswrapper[5070]: E1213 03:32:26.248852 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="100c82c7-ad01-4aa0-82bd-ddca864ebae3" containerName="sg-core" Dec 13 03:32:26 crc kubenswrapper[5070]: I1213 03:32:26.248861 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="100c82c7-ad01-4aa0-82bd-ddca864ebae3" containerName="sg-core" Dec 13 03:32:26 crc kubenswrapper[5070]: E1213 03:32:26.248890 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="100c82c7-ad01-4aa0-82bd-ddca864ebae3" containerName="proxy-httpd" Dec 13 03:32:26 crc kubenswrapper[5070]: I1213 03:32:26.248898 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="100c82c7-ad01-4aa0-82bd-ddca864ebae3" containerName="proxy-httpd" Dec 13 03:32:26 crc kubenswrapper[5070]: I1213 03:32:26.249140 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="100c82c7-ad01-4aa0-82bd-ddca864ebae3" containerName="proxy-httpd" Dec 13 03:32:26 crc kubenswrapper[5070]: I1213 03:32:26.249155 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="100c82c7-ad01-4aa0-82bd-ddca864ebae3" containerName="ceilometer-central-agent" Dec 13 03:32:26 crc kubenswrapper[5070]: I1213 03:32:26.249180 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="100c82c7-ad01-4aa0-82bd-ddca864ebae3" containerName="ceilometer-notification-agent" Dec 13 03:32:26 crc 
kubenswrapper[5070]: I1213 03:32:26.249198 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="100c82c7-ad01-4aa0-82bd-ddca864ebae3" containerName="sg-core" Dec 13 03:32:26 crc kubenswrapper[5070]: I1213 03:32:26.249661 5070 scope.go:117] "RemoveContainer" containerID="bcad05573cc2318977a4c9e042ada148a6df05fb9d0290d6efa62d3b7e2ff053" Dec 13 03:32:26 crc kubenswrapper[5070]: E1213 03:32:26.250100 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bcad05573cc2318977a4c9e042ada148a6df05fb9d0290d6efa62d3b7e2ff053\": container with ID starting with bcad05573cc2318977a4c9e042ada148a6df05fb9d0290d6efa62d3b7e2ff053 not found: ID does not exist" containerID="bcad05573cc2318977a4c9e042ada148a6df05fb9d0290d6efa62d3b7e2ff053" Dec 13 03:32:26 crc kubenswrapper[5070]: I1213 03:32:26.250138 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bcad05573cc2318977a4c9e042ada148a6df05fb9d0290d6efa62d3b7e2ff053"} err="failed to get container status \"bcad05573cc2318977a4c9e042ada148a6df05fb9d0290d6efa62d3b7e2ff053\": rpc error: code = NotFound desc = could not find container \"bcad05573cc2318977a4c9e042ada148a6df05fb9d0290d6efa62d3b7e2ff053\": container with ID starting with bcad05573cc2318977a4c9e042ada148a6df05fb9d0290d6efa62d3b7e2ff053 not found: ID does not exist" Dec 13 03:32:26 crc kubenswrapper[5070]: I1213 03:32:26.250165 5070 scope.go:117] "RemoveContainer" containerID="bdc24c6846bca7892f55714b6b8d166853a75b3ad9ebf1c10ce7e313826e4fc7" Dec 13 03:32:26 crc kubenswrapper[5070]: E1213 03:32:26.250410 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bdc24c6846bca7892f55714b6b8d166853a75b3ad9ebf1c10ce7e313826e4fc7\": container with ID starting with bdc24c6846bca7892f55714b6b8d166853a75b3ad9ebf1c10ce7e313826e4fc7 not found: ID does not exist" containerID="bdc24c6846bca7892f55714b6b8d166853a75b3ad9ebf1c10ce7e313826e4fc7" Dec 13 03:32:26 crc kubenswrapper[5070]: I1213 03:32:26.250427 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bdc24c6846bca7892f55714b6b8d166853a75b3ad9ebf1c10ce7e313826e4fc7"} err="failed to get container status \"bdc24c6846bca7892f55714b6b8d166853a75b3ad9ebf1c10ce7e313826e4fc7\": rpc error: code = NotFound desc = could not find container \"bdc24c6846bca7892f55714b6b8d166853a75b3ad9ebf1c10ce7e313826e4fc7\": container with ID starting with bdc24c6846bca7892f55714b6b8d166853a75b3ad9ebf1c10ce7e313826e4fc7 not found: ID does not exist" Dec 13 03:32:26 crc kubenswrapper[5070]: I1213 03:32:26.250456 5070 scope.go:117] "RemoveContainer" containerID="770730c75c8f3aced2aa1c444c44952ccaf658492d76d7eea1b84ff4bfb72065" Dec 13 03:32:26 crc kubenswrapper[5070]: E1213 03:32:26.250638 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"770730c75c8f3aced2aa1c444c44952ccaf658492d76d7eea1b84ff4bfb72065\": container with ID starting with 770730c75c8f3aced2aa1c444c44952ccaf658492d76d7eea1b84ff4bfb72065 not found: ID does not exist" containerID="770730c75c8f3aced2aa1c444c44952ccaf658492d76d7eea1b84ff4bfb72065" Dec 13 03:32:26 crc kubenswrapper[5070]: I1213 03:32:26.250851 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"770730c75c8f3aced2aa1c444c44952ccaf658492d76d7eea1b84ff4bfb72065"} err="failed to get container 
status \"770730c75c8f3aced2aa1c444c44952ccaf658492d76d7eea1b84ff4bfb72065\": rpc error: code = NotFound desc = could not find container \"770730c75c8f3aced2aa1c444c44952ccaf658492d76d7eea1b84ff4bfb72065\": container with ID starting with 770730c75c8f3aced2aa1c444c44952ccaf658492d76d7eea1b84ff4bfb72065 not found: ID does not exist" Dec 13 03:32:26 crc kubenswrapper[5070]: I1213 03:32:26.250870 5070 scope.go:117] "RemoveContainer" containerID="8947da473a129a723485267e60fb8a98ec1dcb2919f0714de331a865321259dd" Dec 13 03:32:26 crc kubenswrapper[5070]: I1213 03:32:26.251242 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 13 03:32:26 crc kubenswrapper[5070]: E1213 03:32:26.251349 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8947da473a129a723485267e60fb8a98ec1dcb2919f0714de331a865321259dd\": container with ID starting with 8947da473a129a723485267e60fb8a98ec1dcb2919f0714de331a865321259dd not found: ID does not exist" containerID="8947da473a129a723485267e60fb8a98ec1dcb2919f0714de331a865321259dd" Dec 13 03:32:26 crc kubenswrapper[5070]: I1213 03:32:26.251393 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8947da473a129a723485267e60fb8a98ec1dcb2919f0714de331a865321259dd"} err="failed to get container status \"8947da473a129a723485267e60fb8a98ec1dcb2919f0714de331a865321259dd\": rpc error: code = NotFound desc = could not find container \"8947da473a129a723485267e60fb8a98ec1dcb2919f0714de331a865321259dd\": container with ID starting with 8947da473a129a723485267e60fb8a98ec1dcb2919f0714de331a865321259dd not found: ID does not exist" Dec 13 03:32:26 crc kubenswrapper[5070]: I1213 03:32:26.253953 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 13 03:32:26 crc kubenswrapper[5070]: I1213 03:32:26.253971 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 13 03:32:26 crc kubenswrapper[5070]: I1213 03:32:26.257338 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Dec 13 03:32:26 crc kubenswrapper[5070]: I1213 03:32:26.263459 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 13 03:32:26 crc kubenswrapper[5070]: I1213 03:32:26.336610 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 13 03:32:26 crc kubenswrapper[5070]: I1213 03:32:26.341948 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae7d2789-ba9f-497c-a444-a6a973ae174d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ae7d2789-ba9f-497c-a444-a6a973ae174d\") " pod="openstack/ceilometer-0" Dec 13 03:32:26 crc kubenswrapper[5070]: I1213 03:32:26.342012 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ae7d2789-ba9f-497c-a444-a6a973ae174d-log-httpd\") pod \"ceilometer-0\" (UID: \"ae7d2789-ba9f-497c-a444-a6a973ae174d\") " pod="openstack/ceilometer-0" Dec 13 03:32:26 crc kubenswrapper[5070]: I1213 03:32:26.342066 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/ae7d2789-ba9f-497c-a444-a6a973ae174d-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"ae7d2789-ba9f-497c-a444-a6a973ae174d\") " pod="openstack/ceilometer-0" Dec 13 03:32:26 crc kubenswrapper[5070]: I1213 03:32:26.342105 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ae7d2789-ba9f-497c-a444-a6a973ae174d-config-data\") pod \"ceilometer-0\" (UID: \"ae7d2789-ba9f-497c-a444-a6a973ae174d\") " pod="openstack/ceilometer-0" Dec 13 03:32:26 crc kubenswrapper[5070]: I1213 03:32:26.342141 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ae7d2789-ba9f-497c-a444-a6a973ae174d-run-httpd\") pod \"ceilometer-0\" (UID: \"ae7d2789-ba9f-497c-a444-a6a973ae174d\") " pod="openstack/ceilometer-0" Dec 13 03:32:26 crc kubenswrapper[5070]: I1213 03:32:26.342164 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qxqrq\" (UniqueName: \"kubernetes.io/projected/ae7d2789-ba9f-497c-a444-a6a973ae174d-kube-api-access-qxqrq\") pod \"ceilometer-0\" (UID: \"ae7d2789-ba9f-497c-a444-a6a973ae174d\") " pod="openstack/ceilometer-0" Dec 13 03:32:26 crc kubenswrapper[5070]: I1213 03:32:26.342187 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ae7d2789-ba9f-497c-a444-a6a973ae174d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ae7d2789-ba9f-497c-a444-a6a973ae174d\") " pod="openstack/ceilometer-0" Dec 13 03:32:26 crc kubenswrapper[5070]: I1213 03:32:26.342215 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ae7d2789-ba9f-497c-a444-a6a973ae174d-scripts\") pod \"ceilometer-0\" (UID: \"ae7d2789-ba9f-497c-a444-a6a973ae174d\") " pod="openstack/ceilometer-0" Dec 13 03:32:26 crc kubenswrapper[5070]: I1213 03:32:26.450989 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h8m8g\" (UniqueName: \"kubernetes.io/projected/05c5be97-d287-43b8-99b0-a1d330f4d2d2-kube-api-access-h8m8g\") pod \"05c5be97-d287-43b8-99b0-a1d330f4d2d2\" (UID: \"05c5be97-d287-43b8-99b0-a1d330f4d2d2\") " Dec 13 03:32:26 crc kubenswrapper[5070]: I1213 03:32:26.451109 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for 
volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05c5be97-d287-43b8-99b0-a1d330f4d2d2-combined-ca-bundle\") pod \"05c5be97-d287-43b8-99b0-a1d330f4d2d2\" (UID: \"05c5be97-d287-43b8-99b0-a1d330f4d2d2\") " Dec 13 03:32:26 crc kubenswrapper[5070]: I1213 03:32:26.451192 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/05c5be97-d287-43b8-99b0-a1d330f4d2d2-logs\") pod \"05c5be97-d287-43b8-99b0-a1d330f4d2d2\" (UID: \"05c5be97-d287-43b8-99b0-a1d330f4d2d2\") " Dec 13 03:32:26 crc kubenswrapper[5070]: I1213 03:32:26.451356 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/05c5be97-d287-43b8-99b0-a1d330f4d2d2-config-data\") pod \"05c5be97-d287-43b8-99b0-a1d330f4d2d2\" (UID: \"05c5be97-d287-43b8-99b0-a1d330f4d2d2\") " Dec 13 03:32:26 crc kubenswrapper[5070]: I1213 03:32:26.451585 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ae7d2789-ba9f-497c-a444-a6a973ae174d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ae7d2789-ba9f-497c-a444-a6a973ae174d\") " pod="openstack/ceilometer-0" Dec 13 03:32:26 crc kubenswrapper[5070]: I1213 03:32:26.451625 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ae7d2789-ba9f-497c-a444-a6a973ae174d-scripts\") pod \"ceilometer-0\" (UID: \"ae7d2789-ba9f-497c-a444-a6a973ae174d\") " pod="openstack/ceilometer-0" Dec 13 03:32:26 crc kubenswrapper[5070]: I1213 03:32:26.451755 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae7d2789-ba9f-497c-a444-a6a973ae174d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ae7d2789-ba9f-497c-a444-a6a973ae174d\") " pod="openstack/ceilometer-0" Dec 13 03:32:26 crc kubenswrapper[5070]: I1213 03:32:26.451793 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ae7d2789-ba9f-497c-a444-a6a973ae174d-log-httpd\") pod \"ceilometer-0\" (UID: \"ae7d2789-ba9f-497c-a444-a6a973ae174d\") " pod="openstack/ceilometer-0" Dec 13 03:32:26 crc kubenswrapper[5070]: I1213 03:32:26.451843 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/ae7d2789-ba9f-497c-a444-a6a973ae174d-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"ae7d2789-ba9f-497c-a444-a6a973ae174d\") " pod="openstack/ceilometer-0" Dec 13 03:32:26 crc kubenswrapper[5070]: I1213 03:32:26.451882 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ae7d2789-ba9f-497c-a444-a6a973ae174d-config-data\") pod \"ceilometer-0\" (UID: \"ae7d2789-ba9f-497c-a444-a6a973ae174d\") " pod="openstack/ceilometer-0" Dec 13 03:32:26 crc kubenswrapper[5070]: I1213 03:32:26.451916 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ae7d2789-ba9f-497c-a444-a6a973ae174d-run-httpd\") pod \"ceilometer-0\" (UID: \"ae7d2789-ba9f-497c-a444-a6a973ae174d\") " pod="openstack/ceilometer-0" Dec 13 03:32:26 crc kubenswrapper[5070]: I1213 03:32:26.451940 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qxqrq\" (UniqueName: 
\"kubernetes.io/projected/ae7d2789-ba9f-497c-a444-a6a973ae174d-kube-api-access-qxqrq\") pod \"ceilometer-0\" (UID: \"ae7d2789-ba9f-497c-a444-a6a973ae174d\") " pod="openstack/ceilometer-0" Dec 13 03:32:26 crc kubenswrapper[5070]: I1213 03:32:26.465932 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ae7d2789-ba9f-497c-a444-a6a973ae174d-run-httpd\") pod \"ceilometer-0\" (UID: \"ae7d2789-ba9f-497c-a444-a6a973ae174d\") " pod="openstack/ceilometer-0" Dec 13 03:32:26 crc kubenswrapper[5070]: I1213 03:32:26.466386 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ae7d2789-ba9f-497c-a444-a6a973ae174d-log-httpd\") pod \"ceilometer-0\" (UID: \"ae7d2789-ba9f-497c-a444-a6a973ae174d\") " pod="openstack/ceilometer-0" Dec 13 03:32:26 crc kubenswrapper[5070]: I1213 03:32:26.466793 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/05c5be97-d287-43b8-99b0-a1d330f4d2d2-logs" (OuterVolumeSpecName: "logs") pod "05c5be97-d287-43b8-99b0-a1d330f4d2d2" (UID: "05c5be97-d287-43b8-99b0-a1d330f4d2d2"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 03:32:26 crc kubenswrapper[5070]: I1213 03:32:26.467554 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/05c5be97-d287-43b8-99b0-a1d330f4d2d2-kube-api-access-h8m8g" (OuterVolumeSpecName: "kube-api-access-h8m8g") pod "05c5be97-d287-43b8-99b0-a1d330f4d2d2" (UID: "05c5be97-d287-43b8-99b0-a1d330f4d2d2"). InnerVolumeSpecName "kube-api-access-h8m8g". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:32:26 crc kubenswrapper[5070]: I1213 03:32:26.475175 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae7d2789-ba9f-497c-a444-a6a973ae174d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ae7d2789-ba9f-497c-a444-a6a973ae174d\") " pod="openstack/ceilometer-0" Dec 13 03:32:26 crc kubenswrapper[5070]: I1213 03:32:26.489643 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qxqrq\" (UniqueName: \"kubernetes.io/projected/ae7d2789-ba9f-497c-a444-a6a973ae174d-kube-api-access-qxqrq\") pod \"ceilometer-0\" (UID: \"ae7d2789-ba9f-497c-a444-a6a973ae174d\") " pod="openstack/ceilometer-0" Dec 13 03:32:26 crc kubenswrapper[5070]: I1213 03:32:26.495859 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ae7d2789-ba9f-497c-a444-a6a973ae174d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ae7d2789-ba9f-497c-a444-a6a973ae174d\") " pod="openstack/ceilometer-0" Dec 13 03:32:26 crc kubenswrapper[5070]: I1213 03:32:26.496232 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/ae7d2789-ba9f-497c-a444-a6a973ae174d-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"ae7d2789-ba9f-497c-a444-a6a973ae174d\") " pod="openstack/ceilometer-0" Dec 13 03:32:26 crc kubenswrapper[5070]: I1213 03:32:26.496633 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ae7d2789-ba9f-497c-a444-a6a973ae174d-scripts\") pod \"ceilometer-0\" (UID: \"ae7d2789-ba9f-497c-a444-a6a973ae174d\") " pod="openstack/ceilometer-0" Dec 13 03:32:26 crc kubenswrapper[5070]: I1213 
03:32:26.500275 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ae7d2789-ba9f-497c-a444-a6a973ae174d-config-data\") pod \"ceilometer-0\" (UID: \"ae7d2789-ba9f-497c-a444-a6a973ae174d\") " pod="openstack/ceilometer-0" Dec 13 03:32:26 crc kubenswrapper[5070]: I1213 03:32:26.516197 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/05c5be97-d287-43b8-99b0-a1d330f4d2d2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "05c5be97-d287-43b8-99b0-a1d330f4d2d2" (UID: "05c5be97-d287-43b8-99b0-a1d330f4d2d2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:32:26 crc kubenswrapper[5070]: I1213 03:32:26.532613 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/05c5be97-d287-43b8-99b0-a1d330f4d2d2-config-data" (OuterVolumeSpecName: "config-data") pod "05c5be97-d287-43b8-99b0-a1d330f4d2d2" (UID: "05c5be97-d287-43b8-99b0-a1d330f4d2d2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:32:26 crc kubenswrapper[5070]: I1213 03:32:26.553880 5070 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05c5be97-d287-43b8-99b0-a1d330f4d2d2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 03:32:26 crc kubenswrapper[5070]: I1213 03:32:26.554120 5070 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/05c5be97-d287-43b8-99b0-a1d330f4d2d2-logs\") on node \"crc\" DevicePath \"\"" Dec 13 03:32:26 crc kubenswrapper[5070]: I1213 03:32:26.554130 5070 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/05c5be97-d287-43b8-99b0-a1d330f4d2d2-config-data\") on node \"crc\" DevicePath \"\"" Dec 13 03:32:26 crc kubenswrapper[5070]: I1213 03:32:26.554138 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h8m8g\" (UniqueName: \"kubernetes.io/projected/05c5be97-d287-43b8-99b0-a1d330f4d2d2-kube-api-access-h8m8g\") on node \"crc\" DevicePath \"\"" Dec 13 03:32:26 crc kubenswrapper[5070]: I1213 03:32:26.574670 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 13 03:32:26 crc kubenswrapper[5070]: I1213 03:32:26.854276 5070 generic.go:334] "Generic (PLEG): container finished" podID="05c5be97-d287-43b8-99b0-a1d330f4d2d2" containerID="18643b2d115a40fecebefc35602a59df31479c988d98d395b4e174ab20171443" exitCode=0 Dec 13 03:32:26 crc kubenswrapper[5070]: I1213 03:32:26.854323 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 13 03:32:26 crc kubenswrapper[5070]: I1213 03:32:26.854364 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"05c5be97-d287-43b8-99b0-a1d330f4d2d2","Type":"ContainerDied","Data":"18643b2d115a40fecebefc35602a59df31479c988d98d395b4e174ab20171443"} Dec 13 03:32:26 crc kubenswrapper[5070]: I1213 03:32:26.854421 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"05c5be97-d287-43b8-99b0-a1d330f4d2d2","Type":"ContainerDied","Data":"1ab1df9fd0db782f9a7ec0d17fcd1a49d1d85e4af7715565eb4450f484012bdb"} Dec 13 03:32:26 crc kubenswrapper[5070]: I1213 03:32:26.854458 5070 scope.go:117] "RemoveContainer" containerID="18643b2d115a40fecebefc35602a59df31479c988d98d395b4e174ab20171443" Dec 13 03:32:26 crc kubenswrapper[5070]: I1213 03:32:26.900366 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 13 03:32:26 crc kubenswrapper[5070]: I1213 03:32:26.903691 5070 scope.go:117] "RemoveContainer" containerID="f50c4f5527bd94edcefe4a6c30b852d770a27f6dbba4fcc6afe9e9aab3db31d1" Dec 13 03:32:26 crc kubenswrapper[5070]: I1213 03:32:26.923362 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Dec 13 03:32:26 crc kubenswrapper[5070]: I1213 03:32:26.934465 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 13 03:32:26 crc kubenswrapper[5070]: E1213 03:32:26.934861 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="05c5be97-d287-43b8-99b0-a1d330f4d2d2" containerName="nova-api-api" Dec 13 03:32:26 crc kubenswrapper[5070]: I1213 03:32:26.934878 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="05c5be97-d287-43b8-99b0-a1d330f4d2d2" containerName="nova-api-api" Dec 13 03:32:26 crc kubenswrapper[5070]: E1213 03:32:26.934903 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="05c5be97-d287-43b8-99b0-a1d330f4d2d2" containerName="nova-api-log" Dec 13 03:32:26 crc kubenswrapper[5070]: I1213 03:32:26.934910 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="05c5be97-d287-43b8-99b0-a1d330f4d2d2" containerName="nova-api-log" Dec 13 03:32:26 crc kubenswrapper[5070]: I1213 03:32:26.935088 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="05c5be97-d287-43b8-99b0-a1d330f4d2d2" containerName="nova-api-api" Dec 13 03:32:26 crc kubenswrapper[5070]: I1213 03:32:26.935118 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="05c5be97-d287-43b8-99b0-a1d330f4d2d2" containerName="nova-api-log" Dec 13 03:32:26 crc kubenswrapper[5070]: I1213 03:32:26.936080 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 13 03:32:26 crc kubenswrapper[5070]: I1213 03:32:26.944430 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 13 03:32:26 crc kubenswrapper[5070]: I1213 03:32:26.947499 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Dec 13 03:32:26 crc kubenswrapper[5070]: I1213 03:32:26.949914 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 13 03:32:26 crc kubenswrapper[5070]: I1213 03:32:26.950039 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Dec 13 03:32:26 crc kubenswrapper[5070]: I1213 03:32:26.964591 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aab1756e-d68e-4718-b56a-59bf625e16bf-config-data\") pod \"nova-api-0\" (UID: \"aab1756e-d68e-4718-b56a-59bf625e16bf\") " pod="openstack/nova-api-0" Dec 13 03:32:26 crc kubenswrapper[5070]: I1213 03:32:26.964640 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/aab1756e-d68e-4718-b56a-59bf625e16bf-logs\") pod \"nova-api-0\" (UID: \"aab1756e-d68e-4718-b56a-59bf625e16bf\") " pod="openstack/nova-api-0" Dec 13 03:32:26 crc kubenswrapper[5070]: I1213 03:32:26.964664 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/aab1756e-d68e-4718-b56a-59bf625e16bf-public-tls-certs\") pod \"nova-api-0\" (UID: \"aab1756e-d68e-4718-b56a-59bf625e16bf\") " pod="openstack/nova-api-0" Dec 13 03:32:26 crc kubenswrapper[5070]: I1213 03:32:26.964715 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aab1756e-d68e-4718-b56a-59bf625e16bf-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"aab1756e-d68e-4718-b56a-59bf625e16bf\") " pod="openstack/nova-api-0" Dec 13 03:32:26 crc kubenswrapper[5070]: I1213 03:32:26.964745 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l7rgf\" (UniqueName: \"kubernetes.io/projected/aab1756e-d68e-4718-b56a-59bf625e16bf-kube-api-access-l7rgf\") pod \"nova-api-0\" (UID: \"aab1756e-d68e-4718-b56a-59bf625e16bf\") " pod="openstack/nova-api-0" Dec 13 03:32:26 crc kubenswrapper[5070]: I1213 03:32:26.964774 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/aab1756e-d68e-4718-b56a-59bf625e16bf-internal-tls-certs\") pod \"nova-api-0\" (UID: \"aab1756e-d68e-4718-b56a-59bf625e16bf\") " pod="openstack/nova-api-0" Dec 13 03:32:26 crc kubenswrapper[5070]: I1213 03:32:26.977619 5070 scope.go:117] "RemoveContainer" containerID="18643b2d115a40fecebefc35602a59df31479c988d98d395b4e174ab20171443" Dec 13 03:32:26 crc kubenswrapper[5070]: E1213 03:32:26.978006 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"18643b2d115a40fecebefc35602a59df31479c988d98d395b4e174ab20171443\": container with ID starting with 18643b2d115a40fecebefc35602a59df31479c988d98d395b4e174ab20171443 not found: ID does not exist" 
containerID="18643b2d115a40fecebefc35602a59df31479c988d98d395b4e174ab20171443" Dec 13 03:32:26 crc kubenswrapper[5070]: I1213 03:32:26.978044 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"18643b2d115a40fecebefc35602a59df31479c988d98d395b4e174ab20171443"} err="failed to get container status \"18643b2d115a40fecebefc35602a59df31479c988d98d395b4e174ab20171443\": rpc error: code = NotFound desc = could not find container \"18643b2d115a40fecebefc35602a59df31479c988d98d395b4e174ab20171443\": container with ID starting with 18643b2d115a40fecebefc35602a59df31479c988d98d395b4e174ab20171443 not found: ID does not exist" Dec 13 03:32:26 crc kubenswrapper[5070]: I1213 03:32:26.978069 5070 scope.go:117] "RemoveContainer" containerID="f50c4f5527bd94edcefe4a6c30b852d770a27f6dbba4fcc6afe9e9aab3db31d1" Dec 13 03:32:26 crc kubenswrapper[5070]: E1213 03:32:26.978464 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f50c4f5527bd94edcefe4a6c30b852d770a27f6dbba4fcc6afe9e9aab3db31d1\": container with ID starting with f50c4f5527bd94edcefe4a6c30b852d770a27f6dbba4fcc6afe9e9aab3db31d1 not found: ID does not exist" containerID="f50c4f5527bd94edcefe4a6c30b852d770a27f6dbba4fcc6afe9e9aab3db31d1" Dec 13 03:32:26 crc kubenswrapper[5070]: I1213 03:32:26.978492 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f50c4f5527bd94edcefe4a6c30b852d770a27f6dbba4fcc6afe9e9aab3db31d1"} err="failed to get container status \"f50c4f5527bd94edcefe4a6c30b852d770a27f6dbba4fcc6afe9e9aab3db31d1\": rpc error: code = NotFound desc = could not find container \"f50c4f5527bd94edcefe4a6c30b852d770a27f6dbba4fcc6afe9e9aab3db31d1\": container with ID starting with f50c4f5527bd94edcefe4a6c30b852d770a27f6dbba4fcc6afe9e9aab3db31d1 not found: ID does not exist" Dec 13 03:32:27 crc kubenswrapper[5070]: I1213 03:32:27.030290 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 13 03:32:27 crc kubenswrapper[5070]: I1213 03:32:27.066905 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aab1756e-d68e-4718-b56a-59bf625e16bf-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"aab1756e-d68e-4718-b56a-59bf625e16bf\") " pod="openstack/nova-api-0" Dec 13 03:32:27 crc kubenswrapper[5070]: I1213 03:32:27.066968 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l7rgf\" (UniqueName: \"kubernetes.io/projected/aab1756e-d68e-4718-b56a-59bf625e16bf-kube-api-access-l7rgf\") pod \"nova-api-0\" (UID: \"aab1756e-d68e-4718-b56a-59bf625e16bf\") " pod="openstack/nova-api-0" Dec 13 03:32:27 crc kubenswrapper[5070]: I1213 03:32:27.067004 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/aab1756e-d68e-4718-b56a-59bf625e16bf-internal-tls-certs\") pod \"nova-api-0\" (UID: \"aab1756e-d68e-4718-b56a-59bf625e16bf\") " pod="openstack/nova-api-0" Dec 13 03:32:27 crc kubenswrapper[5070]: I1213 03:32:27.067076 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aab1756e-d68e-4718-b56a-59bf625e16bf-config-data\") pod \"nova-api-0\" (UID: \"aab1756e-d68e-4718-b56a-59bf625e16bf\") " pod="openstack/nova-api-0" Dec 13 03:32:27 crc kubenswrapper[5070]: I1213 03:32:27.067126 5070 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/aab1756e-d68e-4718-b56a-59bf625e16bf-logs\") pod \"nova-api-0\" (UID: \"aab1756e-d68e-4718-b56a-59bf625e16bf\") " pod="openstack/nova-api-0" Dec 13 03:32:27 crc kubenswrapper[5070]: I1213 03:32:27.067148 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/aab1756e-d68e-4718-b56a-59bf625e16bf-public-tls-certs\") pod \"nova-api-0\" (UID: \"aab1756e-d68e-4718-b56a-59bf625e16bf\") " pod="openstack/nova-api-0" Dec 13 03:32:27 crc kubenswrapper[5070]: I1213 03:32:27.068729 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/aab1756e-d68e-4718-b56a-59bf625e16bf-logs\") pod \"nova-api-0\" (UID: \"aab1756e-d68e-4718-b56a-59bf625e16bf\") " pod="openstack/nova-api-0" Dec 13 03:32:27 crc kubenswrapper[5070]: I1213 03:32:27.072026 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/aab1756e-d68e-4718-b56a-59bf625e16bf-public-tls-certs\") pod \"nova-api-0\" (UID: \"aab1756e-d68e-4718-b56a-59bf625e16bf\") " pod="openstack/nova-api-0" Dec 13 03:32:27 crc kubenswrapper[5070]: I1213 03:32:27.072127 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aab1756e-d68e-4718-b56a-59bf625e16bf-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"aab1756e-d68e-4718-b56a-59bf625e16bf\") " pod="openstack/nova-api-0" Dec 13 03:32:27 crc kubenswrapper[5070]: I1213 03:32:27.073601 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aab1756e-d68e-4718-b56a-59bf625e16bf-config-data\") pod \"nova-api-0\" (UID: \"aab1756e-d68e-4718-b56a-59bf625e16bf\") " pod="openstack/nova-api-0" Dec 13 03:32:27 crc kubenswrapper[5070]: I1213 03:32:27.073608 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/aab1756e-d68e-4718-b56a-59bf625e16bf-internal-tls-certs\") pod \"nova-api-0\" (UID: \"aab1756e-d68e-4718-b56a-59bf625e16bf\") " pod="openstack/nova-api-0" Dec 13 03:32:27 crc kubenswrapper[5070]: I1213 03:32:27.090007 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l7rgf\" (UniqueName: \"kubernetes.io/projected/aab1756e-d68e-4718-b56a-59bf625e16bf-kube-api-access-l7rgf\") pod \"nova-api-0\" (UID: \"aab1756e-d68e-4718-b56a-59bf625e16bf\") " pod="openstack/nova-api-0" Dec 13 03:32:27 crc kubenswrapper[5070]: I1213 03:32:27.280395 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 13 03:32:27 crc kubenswrapper[5070]: W1213 03:32:27.762723 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podaab1756e_d68e_4718_b56a_59bf625e16bf.slice/crio-8d663b1f8f24474b7e931b8ae4d99690e269d6cd360b7cf4544df782ad140083 WatchSource:0}: Error finding container 8d663b1f8f24474b7e931b8ae4d99690e269d6cd360b7cf4544df782ad140083: Status 404 returned error can't find the container with id 8d663b1f8f24474b7e931b8ae4d99690e269d6cd360b7cf4544df782ad140083 Dec 13 03:32:27 crc kubenswrapper[5070]: I1213 03:32:27.763479 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 13 03:32:27 crc kubenswrapper[5070]: I1213 03:32:27.865903 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ae7d2789-ba9f-497c-a444-a6a973ae174d","Type":"ContainerStarted","Data":"9c04bc14d773cc2b53835361270bcb275ed128ba4e6833d2290349d1918c8ac5"} Dec 13 03:32:27 crc kubenswrapper[5070]: I1213 03:32:27.868295 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"aab1756e-d68e-4718-b56a-59bf625e16bf","Type":"ContainerStarted","Data":"8d663b1f8f24474b7e931b8ae4d99690e269d6cd360b7cf4544df782ad140083"} Dec 13 03:32:28 crc kubenswrapper[5070]: I1213 03:32:28.176611 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="05c5be97-d287-43b8-99b0-a1d330f4d2d2" path="/var/lib/kubelet/pods/05c5be97-d287-43b8-99b0-a1d330f4d2d2/volumes" Dec 13 03:32:28 crc kubenswrapper[5070]: I1213 03:32:28.177491 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="100c82c7-ad01-4aa0-82bd-ddca864ebae3" path="/var/lib/kubelet/pods/100c82c7-ad01-4aa0-82bd-ddca864ebae3/volumes" Dec 13 03:32:28 crc kubenswrapper[5070]: I1213 03:32:28.417657 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Dec 13 03:32:28 crc kubenswrapper[5070]: I1213 03:32:28.443048 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0" Dec 13 03:32:28 crc kubenswrapper[5070]: I1213 03:32:28.877881 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ae7d2789-ba9f-497c-a444-a6a973ae174d","Type":"ContainerStarted","Data":"94082177bcb0ed710678b98bb5f42bdd8ed78ba2c8769ec5466eee631cb71cfb"} Dec 13 03:32:28 crc kubenswrapper[5070]: I1213 03:32:28.878220 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ae7d2789-ba9f-497c-a444-a6a973ae174d","Type":"ContainerStarted","Data":"158d54d24f96255ea9ac0e5e391f70e01b4abd627af90260f0f30443b4dac8dd"} Dec 13 03:32:28 crc kubenswrapper[5070]: I1213 03:32:28.879781 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"aab1756e-d68e-4718-b56a-59bf625e16bf","Type":"ContainerStarted","Data":"dc736f0188e92c287f63148ccfcce62053581e40bcdec8b4958dacfd72f179d9"} Dec 13 03:32:28 crc kubenswrapper[5070]: I1213 03:32:28.879821 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"aab1756e-d68e-4718-b56a-59bf625e16bf","Type":"ContainerStarted","Data":"1db10164433b257453c495542b42d4ef9e6580becd4440d018cde371b3c1eef8"} Dec 13 03:32:28 crc kubenswrapper[5070]: I1213 03:32:28.894417 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openstack/nova-cell1-novncproxy-0" Dec 13 03:32:28 crc kubenswrapper[5070]: I1213 03:32:28.907882 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.907859905 podStartE2EDuration="2.907859905s" podCreationTimestamp="2025-12-13 03:32:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 03:32:28.906662573 +0000 UTC m=+1241.142506119" watchObservedRunningTime="2025-12-13 03:32:28.907859905 +0000 UTC m=+1241.143703451" Dec 13 03:32:29 crc kubenswrapper[5070]: I1213 03:32:29.084069 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-cell-mapping-n59t8"] Dec 13 03:32:29 crc kubenswrapper[5070]: I1213 03:32:29.085486 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-n59t8" Dec 13 03:32:29 crc kubenswrapper[5070]: I1213 03:32:29.090595 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-scripts" Dec 13 03:32:29 crc kubenswrapper[5070]: I1213 03:32:29.093011 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-n59t8"] Dec 13 03:32:29 crc kubenswrapper[5070]: I1213 03:32:29.093679 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-config-data" Dec 13 03:32:29 crc kubenswrapper[5070]: I1213 03:32:29.107343 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a485ad64-a9af-43f3-863a-0ee52612df72-scripts\") pod \"nova-cell1-cell-mapping-n59t8\" (UID: \"a485ad64-a9af-43f3-863a-0ee52612df72\") " pod="openstack/nova-cell1-cell-mapping-n59t8" Dec 13 03:32:29 crc kubenswrapper[5070]: I1213 03:32:29.107391 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a485ad64-a9af-43f3-863a-0ee52612df72-config-data\") pod \"nova-cell1-cell-mapping-n59t8\" (UID: \"a485ad64-a9af-43f3-863a-0ee52612df72\") " pod="openstack/nova-cell1-cell-mapping-n59t8" Dec 13 03:32:29 crc kubenswrapper[5070]: I1213 03:32:29.107425 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-927z9\" (UniqueName: \"kubernetes.io/projected/a485ad64-a9af-43f3-863a-0ee52612df72-kube-api-access-927z9\") pod \"nova-cell1-cell-mapping-n59t8\" (UID: \"a485ad64-a9af-43f3-863a-0ee52612df72\") " pod="openstack/nova-cell1-cell-mapping-n59t8" Dec 13 03:32:29 crc kubenswrapper[5070]: I1213 03:32:29.107490 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a485ad64-a9af-43f3-863a-0ee52612df72-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-n59t8\" (UID: \"a485ad64-a9af-43f3-863a-0ee52612df72\") " pod="openstack/nova-cell1-cell-mapping-n59t8" Dec 13 03:32:29 crc kubenswrapper[5070]: I1213 03:32:29.208746 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a485ad64-a9af-43f3-863a-0ee52612df72-scripts\") pod \"nova-cell1-cell-mapping-n59t8\" (UID: \"a485ad64-a9af-43f3-863a-0ee52612df72\") " pod="openstack/nova-cell1-cell-mapping-n59t8" Dec 13 03:32:29 crc kubenswrapper[5070]: I1213 03:32:29.208825 5070 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a485ad64-a9af-43f3-863a-0ee52612df72-config-data\") pod \"nova-cell1-cell-mapping-n59t8\" (UID: \"a485ad64-a9af-43f3-863a-0ee52612df72\") " pod="openstack/nova-cell1-cell-mapping-n59t8" Dec 13 03:32:29 crc kubenswrapper[5070]: I1213 03:32:29.208884 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-927z9\" (UniqueName: \"kubernetes.io/projected/a485ad64-a9af-43f3-863a-0ee52612df72-kube-api-access-927z9\") pod \"nova-cell1-cell-mapping-n59t8\" (UID: \"a485ad64-a9af-43f3-863a-0ee52612df72\") " pod="openstack/nova-cell1-cell-mapping-n59t8" Dec 13 03:32:29 crc kubenswrapper[5070]: I1213 03:32:29.208955 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a485ad64-a9af-43f3-863a-0ee52612df72-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-n59t8\" (UID: \"a485ad64-a9af-43f3-863a-0ee52612df72\") " pod="openstack/nova-cell1-cell-mapping-n59t8" Dec 13 03:32:29 crc kubenswrapper[5070]: I1213 03:32:29.215239 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a485ad64-a9af-43f3-863a-0ee52612df72-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-n59t8\" (UID: \"a485ad64-a9af-43f3-863a-0ee52612df72\") " pod="openstack/nova-cell1-cell-mapping-n59t8" Dec 13 03:32:29 crc kubenswrapper[5070]: I1213 03:32:29.229086 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a485ad64-a9af-43f3-863a-0ee52612df72-config-data\") pod \"nova-cell1-cell-mapping-n59t8\" (UID: \"a485ad64-a9af-43f3-863a-0ee52612df72\") " pod="openstack/nova-cell1-cell-mapping-n59t8" Dec 13 03:32:29 crc kubenswrapper[5070]: I1213 03:32:29.229357 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a485ad64-a9af-43f3-863a-0ee52612df72-scripts\") pod \"nova-cell1-cell-mapping-n59t8\" (UID: \"a485ad64-a9af-43f3-863a-0ee52612df72\") " pod="openstack/nova-cell1-cell-mapping-n59t8" Dec 13 03:32:29 crc kubenswrapper[5070]: I1213 03:32:29.232975 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-927z9\" (UniqueName: \"kubernetes.io/projected/a485ad64-a9af-43f3-863a-0ee52612df72-kube-api-access-927z9\") pod \"nova-cell1-cell-mapping-n59t8\" (UID: \"a485ad64-a9af-43f3-863a-0ee52612df72\") " pod="openstack/nova-cell1-cell-mapping-n59t8" Dec 13 03:32:29 crc kubenswrapper[5070]: I1213 03:32:29.411500 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-n59t8" Dec 13 03:32:29 crc kubenswrapper[5070]: I1213 03:32:29.923558 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ae7d2789-ba9f-497c-a444-a6a973ae174d","Type":"ContainerStarted","Data":"22cf25b51e02a9782bce2e80e4af85b93bfc55609ae89cbc2d9d3ef418b8908a"} Dec 13 03:32:29 crc kubenswrapper[5070]: I1213 03:32:29.935807 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-n59t8"] Dec 13 03:32:30 crc kubenswrapper[5070]: I1213 03:32:30.370625 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-68d4b6d797-5qpfb" Dec 13 03:32:30 crc kubenswrapper[5070]: I1213 03:32:30.440198 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8b8cf6657-2j5dx"] Dec 13 03:32:30 crc kubenswrapper[5070]: I1213 03:32:30.440536 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-8b8cf6657-2j5dx" podUID="30d804de-c917-4ff4-9576-3e8410417e0a" containerName="dnsmasq-dns" containerID="cri-o://a86dc0f781b3633ecbf38484b46a41a5fc0d82dda45217b6bbd310ba3511ceaf" gracePeriod=10 Dec 13 03:32:30 crc kubenswrapper[5070]: I1213 03:32:30.858460 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8b8cf6657-2j5dx" Dec 13 03:32:30 crc kubenswrapper[5070]: I1213 03:32:30.946699 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jqpcs\" (UniqueName: \"kubernetes.io/projected/30d804de-c917-4ff4-9576-3e8410417e0a-kube-api-access-jqpcs\") pod \"30d804de-c917-4ff4-9576-3e8410417e0a\" (UID: \"30d804de-c917-4ff4-9576-3e8410417e0a\") " Dec 13 03:32:30 crc kubenswrapper[5070]: I1213 03:32:30.946753 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/30d804de-c917-4ff4-9576-3e8410417e0a-dns-svc\") pod \"30d804de-c917-4ff4-9576-3e8410417e0a\" (UID: \"30d804de-c917-4ff4-9576-3e8410417e0a\") " Dec 13 03:32:30 crc kubenswrapper[5070]: I1213 03:32:30.946780 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/30d804de-c917-4ff4-9576-3e8410417e0a-ovsdbserver-nb\") pod \"30d804de-c917-4ff4-9576-3e8410417e0a\" (UID: \"30d804de-c917-4ff4-9576-3e8410417e0a\") " Dec 13 03:32:30 crc kubenswrapper[5070]: I1213 03:32:30.946803 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/30d804de-c917-4ff4-9576-3e8410417e0a-config\") pod \"30d804de-c917-4ff4-9576-3e8410417e0a\" (UID: \"30d804de-c917-4ff4-9576-3e8410417e0a\") " Dec 13 03:32:30 crc kubenswrapper[5070]: I1213 03:32:30.946859 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/30d804de-c917-4ff4-9576-3e8410417e0a-ovsdbserver-sb\") pod \"30d804de-c917-4ff4-9576-3e8410417e0a\" (UID: \"30d804de-c917-4ff4-9576-3e8410417e0a\") " Dec 13 03:32:30 crc kubenswrapper[5070]: I1213 03:32:30.955528 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/30d804de-c917-4ff4-9576-3e8410417e0a-kube-api-access-jqpcs" (OuterVolumeSpecName: "kube-api-access-jqpcs") pod "30d804de-c917-4ff4-9576-3e8410417e0a" (UID: "30d804de-c917-4ff4-9576-3e8410417e0a"). 
InnerVolumeSpecName "kube-api-access-jqpcs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:32:30 crc kubenswrapper[5070]: I1213 03:32:30.960716 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-n59t8" event={"ID":"a485ad64-a9af-43f3-863a-0ee52612df72","Type":"ContainerStarted","Data":"6f644a434838affefee596628949bb8f090c1d7a31bb167fba52cfd8addfe0e9"} Dec 13 03:32:30 crc kubenswrapper[5070]: I1213 03:32:30.960759 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-n59t8" event={"ID":"a485ad64-a9af-43f3-863a-0ee52612df72","Type":"ContainerStarted","Data":"3344a5addf1882e512a36b170e2862957bae03ff1b8bf899d6fa7a374d1fd041"} Dec 13 03:32:30 crc kubenswrapper[5070]: I1213 03:32:30.970868 5070 generic.go:334] "Generic (PLEG): container finished" podID="30d804de-c917-4ff4-9576-3e8410417e0a" containerID="a86dc0f781b3633ecbf38484b46a41a5fc0d82dda45217b6bbd310ba3511ceaf" exitCode=0 Dec 13 03:32:30 crc kubenswrapper[5070]: I1213 03:32:30.970897 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8b8cf6657-2j5dx" event={"ID":"30d804de-c917-4ff4-9576-3e8410417e0a","Type":"ContainerDied","Data":"a86dc0f781b3633ecbf38484b46a41a5fc0d82dda45217b6bbd310ba3511ceaf"} Dec 13 03:32:30 crc kubenswrapper[5070]: I1213 03:32:30.970915 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8b8cf6657-2j5dx" event={"ID":"30d804de-c917-4ff4-9576-3e8410417e0a","Type":"ContainerDied","Data":"dac413f072b2ba18cd5ed24163668704a74b83e4b3578208c8a0b26544f63710"} Dec 13 03:32:30 crc kubenswrapper[5070]: I1213 03:32:30.970934 5070 scope.go:117] "RemoveContainer" containerID="a86dc0f781b3633ecbf38484b46a41a5fc0d82dda45217b6bbd310ba3511ceaf" Dec 13 03:32:30 crc kubenswrapper[5070]: I1213 03:32:30.971039 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8b8cf6657-2j5dx" Dec 13 03:32:31 crc kubenswrapper[5070]: I1213 03:32:31.000561 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-cell-mapping-n59t8" podStartSLOduration=2.000542473 podStartE2EDuration="2.000542473s" podCreationTimestamp="2025-12-13 03:32:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 03:32:30.979743114 +0000 UTC m=+1243.215586670" watchObservedRunningTime="2025-12-13 03:32:31.000542473 +0000 UTC m=+1243.236386019" Dec 13 03:32:31 crc kubenswrapper[5070]: I1213 03:32:31.037378 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/30d804de-c917-4ff4-9576-3e8410417e0a-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "30d804de-c917-4ff4-9576-3e8410417e0a" (UID: "30d804de-c917-4ff4-9576-3e8410417e0a"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:32:31 crc kubenswrapper[5070]: I1213 03:32:31.043126 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/30d804de-c917-4ff4-9576-3e8410417e0a-config" (OuterVolumeSpecName: "config") pod "30d804de-c917-4ff4-9576-3e8410417e0a" (UID: "30d804de-c917-4ff4-9576-3e8410417e0a"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:32:31 crc kubenswrapper[5070]: I1213 03:32:31.049621 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jqpcs\" (UniqueName: \"kubernetes.io/projected/30d804de-c917-4ff4-9576-3e8410417e0a-kube-api-access-jqpcs\") on node \"crc\" DevicePath \"\"" Dec 13 03:32:31 crc kubenswrapper[5070]: I1213 03:32:31.050662 5070 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/30d804de-c917-4ff4-9576-3e8410417e0a-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 13 03:32:31 crc kubenswrapper[5070]: I1213 03:32:31.050753 5070 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/30d804de-c917-4ff4-9576-3e8410417e0a-config\") on node \"crc\" DevicePath \"\"" Dec 13 03:32:31 crc kubenswrapper[5070]: I1213 03:32:31.058531 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/30d804de-c917-4ff4-9576-3e8410417e0a-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "30d804de-c917-4ff4-9576-3e8410417e0a" (UID: "30d804de-c917-4ff4-9576-3e8410417e0a"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:32:31 crc kubenswrapper[5070]: I1213 03:32:31.064871 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/30d804de-c917-4ff4-9576-3e8410417e0a-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "30d804de-c917-4ff4-9576-3e8410417e0a" (UID: "30d804de-c917-4ff4-9576-3e8410417e0a"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:32:31 crc kubenswrapper[5070]: I1213 03:32:31.147286 5070 scope.go:117] "RemoveContainer" containerID="3a4e8fa2029ef52b4a2d3221a62fa9df189ccbfa51bae5dab00b3fe3c38a78af" Dec 13 03:32:31 crc kubenswrapper[5070]: I1213 03:32:31.151580 5070 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/30d804de-c917-4ff4-9576-3e8410417e0a-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 13 03:32:31 crc kubenswrapper[5070]: I1213 03:32:31.151615 5070 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/30d804de-c917-4ff4-9576-3e8410417e0a-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 13 03:32:31 crc kubenswrapper[5070]: I1213 03:32:31.170786 5070 scope.go:117] "RemoveContainer" containerID="a86dc0f781b3633ecbf38484b46a41a5fc0d82dda45217b6bbd310ba3511ceaf" Dec 13 03:32:31 crc kubenswrapper[5070]: E1213 03:32:31.171296 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a86dc0f781b3633ecbf38484b46a41a5fc0d82dda45217b6bbd310ba3511ceaf\": container with ID starting with a86dc0f781b3633ecbf38484b46a41a5fc0d82dda45217b6bbd310ba3511ceaf not found: ID does not exist" containerID="a86dc0f781b3633ecbf38484b46a41a5fc0d82dda45217b6bbd310ba3511ceaf" Dec 13 03:32:31 crc kubenswrapper[5070]: I1213 03:32:31.171333 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a86dc0f781b3633ecbf38484b46a41a5fc0d82dda45217b6bbd310ba3511ceaf"} err="failed to get container status \"a86dc0f781b3633ecbf38484b46a41a5fc0d82dda45217b6bbd310ba3511ceaf\": rpc error: code = NotFound desc = could not find container \"a86dc0f781b3633ecbf38484b46a41a5fc0d82dda45217b6bbd310ba3511ceaf\": 
container with ID starting with a86dc0f781b3633ecbf38484b46a41a5fc0d82dda45217b6bbd310ba3511ceaf not found: ID does not exist" Dec 13 03:32:31 crc kubenswrapper[5070]: I1213 03:32:31.171354 5070 scope.go:117] "RemoveContainer" containerID="3a4e8fa2029ef52b4a2d3221a62fa9df189ccbfa51bae5dab00b3fe3c38a78af" Dec 13 03:32:31 crc kubenswrapper[5070]: E1213 03:32:31.171753 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3a4e8fa2029ef52b4a2d3221a62fa9df189ccbfa51bae5dab00b3fe3c38a78af\": container with ID starting with 3a4e8fa2029ef52b4a2d3221a62fa9df189ccbfa51bae5dab00b3fe3c38a78af not found: ID does not exist" containerID="3a4e8fa2029ef52b4a2d3221a62fa9df189ccbfa51bae5dab00b3fe3c38a78af" Dec 13 03:32:31 crc kubenswrapper[5070]: I1213 03:32:31.171774 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3a4e8fa2029ef52b4a2d3221a62fa9df189ccbfa51bae5dab00b3fe3c38a78af"} err="failed to get container status \"3a4e8fa2029ef52b4a2d3221a62fa9df189ccbfa51bae5dab00b3fe3c38a78af\": rpc error: code = NotFound desc = could not find container \"3a4e8fa2029ef52b4a2d3221a62fa9df189ccbfa51bae5dab00b3fe3c38a78af\": container with ID starting with 3a4e8fa2029ef52b4a2d3221a62fa9df189ccbfa51bae5dab00b3fe3c38a78af not found: ID does not exist" Dec 13 03:32:31 crc kubenswrapper[5070]: I1213 03:32:31.305689 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8b8cf6657-2j5dx"] Dec 13 03:32:31 crc kubenswrapper[5070]: I1213 03:32:31.313348 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-8b8cf6657-2j5dx"] Dec 13 03:32:31 crc kubenswrapper[5070]: I1213 03:32:31.982617 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ae7d2789-ba9f-497c-a444-a6a973ae174d","Type":"ContainerStarted","Data":"4bf21c7eaa6cb8982af7d872506ff0e69122e8ab58dbaccb40a8dc4f683b05f3"} Dec 13 03:32:31 crc kubenswrapper[5070]: I1213 03:32:31.982689 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 13 03:32:32 crc kubenswrapper[5070]: I1213 03:32:32.017578 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.267221166 podStartE2EDuration="6.017551904s" podCreationTimestamp="2025-12-13 03:32:26 +0000 UTC" firstStartedPulling="2025-12-13 03:32:27.041974631 +0000 UTC m=+1239.277818177" lastFinishedPulling="2025-12-13 03:32:30.792305369 +0000 UTC m=+1243.028148915" observedRunningTime="2025-12-13 03:32:32.007146918 +0000 UTC m=+1244.242990634" watchObservedRunningTime="2025-12-13 03:32:32.017551904 +0000 UTC m=+1244.253395450" Dec 13 03:32:32 crc kubenswrapper[5070]: I1213 03:32:32.178419 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="30d804de-c917-4ff4-9576-3e8410417e0a" path="/var/lib/kubelet/pods/30d804de-c917-4ff4-9576-3e8410417e0a/volumes" Dec 13 03:32:32 crc kubenswrapper[5070]: E1213 03:32:32.739345 5070 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1d71c72d_4599_48e7_8c31_63f6968aacb2.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1d71c72d_4599_48e7_8c31_63f6968aacb2.slice/crio-a86ed279fdad21653609fc09f712450947d9082e3611c9069cd21b6241a1fa88\": RecentStats: unable 
to find data in memory cache]" Dec 13 03:32:36 crc kubenswrapper[5070]: I1213 03:32:36.032912 5070 generic.go:334] "Generic (PLEG): container finished" podID="a485ad64-a9af-43f3-863a-0ee52612df72" containerID="6f644a434838affefee596628949bb8f090c1d7a31bb167fba52cfd8addfe0e9" exitCode=0 Dec 13 03:32:36 crc kubenswrapper[5070]: I1213 03:32:36.033014 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-n59t8" event={"ID":"a485ad64-a9af-43f3-863a-0ee52612df72","Type":"ContainerDied","Data":"6f644a434838affefee596628949bb8f090c1d7a31bb167fba52cfd8addfe0e9"} Dec 13 03:32:37 crc kubenswrapper[5070]: I1213 03:32:37.280805 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 13 03:32:37 crc kubenswrapper[5070]: I1213 03:32:37.281155 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 13 03:32:37 crc kubenswrapper[5070]: I1213 03:32:37.451679 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-n59t8" Dec 13 03:32:37 crc kubenswrapper[5070]: I1213 03:32:37.489129 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a485ad64-a9af-43f3-863a-0ee52612df72-combined-ca-bundle\") pod \"a485ad64-a9af-43f3-863a-0ee52612df72\" (UID: \"a485ad64-a9af-43f3-863a-0ee52612df72\") " Dec 13 03:32:37 crc kubenswrapper[5070]: I1213 03:32:37.489216 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a485ad64-a9af-43f3-863a-0ee52612df72-scripts\") pod \"a485ad64-a9af-43f3-863a-0ee52612df72\" (UID: \"a485ad64-a9af-43f3-863a-0ee52612df72\") " Dec 13 03:32:37 crc kubenswrapper[5070]: I1213 03:32:37.489315 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a485ad64-a9af-43f3-863a-0ee52612df72-config-data\") pod \"a485ad64-a9af-43f3-863a-0ee52612df72\" (UID: \"a485ad64-a9af-43f3-863a-0ee52612df72\") " Dec 13 03:32:37 crc kubenswrapper[5070]: I1213 03:32:37.489527 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-927z9\" (UniqueName: \"kubernetes.io/projected/a485ad64-a9af-43f3-863a-0ee52612df72-kube-api-access-927z9\") pod \"a485ad64-a9af-43f3-863a-0ee52612df72\" (UID: \"a485ad64-a9af-43f3-863a-0ee52612df72\") " Dec 13 03:32:37 crc kubenswrapper[5070]: I1213 03:32:37.500838 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a485ad64-a9af-43f3-863a-0ee52612df72-kube-api-access-927z9" (OuterVolumeSpecName: "kube-api-access-927z9") pod "a485ad64-a9af-43f3-863a-0ee52612df72" (UID: "a485ad64-a9af-43f3-863a-0ee52612df72"). InnerVolumeSpecName "kube-api-access-927z9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:32:37 crc kubenswrapper[5070]: I1213 03:32:37.500840 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a485ad64-a9af-43f3-863a-0ee52612df72-scripts" (OuterVolumeSpecName: "scripts") pod "a485ad64-a9af-43f3-863a-0ee52612df72" (UID: "a485ad64-a9af-43f3-863a-0ee52612df72"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:32:37 crc kubenswrapper[5070]: I1213 03:32:37.521637 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a485ad64-a9af-43f3-863a-0ee52612df72-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a485ad64-a9af-43f3-863a-0ee52612df72" (UID: "a485ad64-a9af-43f3-863a-0ee52612df72"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:32:37 crc kubenswrapper[5070]: I1213 03:32:37.554608 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a485ad64-a9af-43f3-863a-0ee52612df72-config-data" (OuterVolumeSpecName: "config-data") pod "a485ad64-a9af-43f3-863a-0ee52612df72" (UID: "a485ad64-a9af-43f3-863a-0ee52612df72"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:32:37 crc kubenswrapper[5070]: I1213 03:32:37.591796 5070 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a485ad64-a9af-43f3-863a-0ee52612df72-scripts\") on node \"crc\" DevicePath \"\"" Dec 13 03:32:37 crc kubenswrapper[5070]: I1213 03:32:37.591841 5070 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a485ad64-a9af-43f3-863a-0ee52612df72-config-data\") on node \"crc\" DevicePath \"\"" Dec 13 03:32:37 crc kubenswrapper[5070]: I1213 03:32:37.591855 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-927z9\" (UniqueName: \"kubernetes.io/projected/a485ad64-a9af-43f3-863a-0ee52612df72-kube-api-access-927z9\") on node \"crc\" DevicePath \"\"" Dec 13 03:32:37 crc kubenswrapper[5070]: I1213 03:32:37.591869 5070 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a485ad64-a9af-43f3-863a-0ee52612df72-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 03:32:38 crc kubenswrapper[5070]: I1213 03:32:38.052743 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-n59t8" event={"ID":"a485ad64-a9af-43f3-863a-0ee52612df72","Type":"ContainerDied","Data":"3344a5addf1882e512a36b170e2862957bae03ff1b8bf899d6fa7a374d1fd041"} Dec 13 03:32:38 crc kubenswrapper[5070]: I1213 03:32:38.052799 5070 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3344a5addf1882e512a36b170e2862957bae03ff1b8bf899d6fa7a374d1fd041" Dec 13 03:32:38 crc kubenswrapper[5070]: I1213 03:32:38.052807 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-n59t8" Dec 13 03:32:38 crc kubenswrapper[5070]: I1213 03:32:38.249640 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 13 03:32:38 crc kubenswrapper[5070]: I1213 03:32:38.249878 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="9e4432dc-3e78-4733-9bfd-4ecb88a9d10f" containerName="nova-scheduler-scheduler" containerID="cri-o://ae46931758d5573f0f2b088f2b02e634f877291a3e5a95c0e80f3046c28818e7" gracePeriod=30 Dec 13 03:32:38 crc kubenswrapper[5070]: I1213 03:32:38.268145 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 13 03:32:38 crc kubenswrapper[5070]: I1213 03:32:38.268387 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="aab1756e-d68e-4718-b56a-59bf625e16bf" containerName="nova-api-log" containerID="cri-o://1db10164433b257453c495542b42d4ef9e6580becd4440d018cde371b3c1eef8" gracePeriod=30 Dec 13 03:32:38 crc kubenswrapper[5070]: I1213 03:32:38.268474 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="aab1756e-d68e-4718-b56a-59bf625e16bf" containerName="nova-api-api" containerID="cri-o://dc736f0188e92c287f63148ccfcce62053581e40bcdec8b4958dacfd72f179d9" gracePeriod=30 Dec 13 03:32:38 crc kubenswrapper[5070]: I1213 03:32:38.281241 5070 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="aab1756e-d68e-4718-b56a-59bf625e16bf" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.184:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 13 03:32:38 crc kubenswrapper[5070]: I1213 03:32:38.281247 5070 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="aab1756e-d68e-4718-b56a-59bf625e16bf" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.184:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 13 03:32:38 crc kubenswrapper[5070]: I1213 03:32:38.331985 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 13 03:32:38 crc kubenswrapper[5070]: I1213 03:32:38.332341 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="c574c806-7eda-4898-a3d7-92e5a80f9950" containerName="nova-metadata-log" containerID="cri-o://63e108499b04dc6fbe6906f6d963a0986409cd78dbccccdac7b83c64d0edfa9f" gracePeriod=30 Dec 13 03:32:38 crc kubenswrapper[5070]: I1213 03:32:38.332679 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="c574c806-7eda-4898-a3d7-92e5a80f9950" containerName="nova-metadata-metadata" containerID="cri-o://c51073fd26d422ecab3a1776e9a7baa263de6d8ffc5084a6968c64056239c644" gracePeriod=30 Dec 13 03:32:39 crc kubenswrapper[5070]: I1213 03:32:39.065651 5070 generic.go:334] "Generic (PLEG): container finished" podID="aab1756e-d68e-4718-b56a-59bf625e16bf" containerID="1db10164433b257453c495542b42d4ef9e6580becd4440d018cde371b3c1eef8" exitCode=143 Dec 13 03:32:39 crc kubenswrapper[5070]: I1213 03:32:39.065734 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"aab1756e-d68e-4718-b56a-59bf625e16bf","Type":"ContainerDied","Data":"1db10164433b257453c495542b42d4ef9e6580becd4440d018cde371b3c1eef8"} Dec 
13 03:32:39 crc kubenswrapper[5070]: I1213 03:32:39.067677 5070 generic.go:334] "Generic (PLEG): container finished" podID="c574c806-7eda-4898-a3d7-92e5a80f9950" containerID="63e108499b04dc6fbe6906f6d963a0986409cd78dbccccdac7b83c64d0edfa9f" exitCode=143 Dec 13 03:32:39 crc kubenswrapper[5070]: I1213 03:32:39.067708 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"c574c806-7eda-4898-a3d7-92e5a80f9950","Type":"ContainerDied","Data":"63e108499b04dc6fbe6906f6d963a0986409cd78dbccccdac7b83c64d0edfa9f"} Dec 13 03:32:40 crc kubenswrapper[5070]: I1213 03:32:40.078860 5070 generic.go:334] "Generic (PLEG): container finished" podID="9e4432dc-3e78-4733-9bfd-4ecb88a9d10f" containerID="ae46931758d5573f0f2b088f2b02e634f877291a3e5a95c0e80f3046c28818e7" exitCode=0 Dec 13 03:32:40 crc kubenswrapper[5070]: I1213 03:32:40.078952 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"9e4432dc-3e78-4733-9bfd-4ecb88a9d10f","Type":"ContainerDied","Data":"ae46931758d5573f0f2b088f2b02e634f877291a3e5a95c0e80f3046c28818e7"} Dec 13 03:32:40 crc kubenswrapper[5070]: I1213 03:32:40.267219 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 13 03:32:40 crc kubenswrapper[5070]: I1213 03:32:40.337552 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mn544\" (UniqueName: \"kubernetes.io/projected/9e4432dc-3e78-4733-9bfd-4ecb88a9d10f-kube-api-access-mn544\") pod \"9e4432dc-3e78-4733-9bfd-4ecb88a9d10f\" (UID: \"9e4432dc-3e78-4733-9bfd-4ecb88a9d10f\") " Dec 13 03:32:40 crc kubenswrapper[5070]: I1213 03:32:40.337820 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9e4432dc-3e78-4733-9bfd-4ecb88a9d10f-config-data\") pod \"9e4432dc-3e78-4733-9bfd-4ecb88a9d10f\" (UID: \"9e4432dc-3e78-4733-9bfd-4ecb88a9d10f\") " Dec 13 03:32:40 crc kubenswrapper[5070]: I1213 03:32:40.337901 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9e4432dc-3e78-4733-9bfd-4ecb88a9d10f-combined-ca-bundle\") pod \"9e4432dc-3e78-4733-9bfd-4ecb88a9d10f\" (UID: \"9e4432dc-3e78-4733-9bfd-4ecb88a9d10f\") " Dec 13 03:32:40 crc kubenswrapper[5070]: I1213 03:32:40.347631 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9e4432dc-3e78-4733-9bfd-4ecb88a9d10f-kube-api-access-mn544" (OuterVolumeSpecName: "kube-api-access-mn544") pod "9e4432dc-3e78-4733-9bfd-4ecb88a9d10f" (UID: "9e4432dc-3e78-4733-9bfd-4ecb88a9d10f"). InnerVolumeSpecName "kube-api-access-mn544". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:32:40 crc kubenswrapper[5070]: I1213 03:32:40.379914 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9e4432dc-3e78-4733-9bfd-4ecb88a9d10f-config-data" (OuterVolumeSpecName: "config-data") pod "9e4432dc-3e78-4733-9bfd-4ecb88a9d10f" (UID: "9e4432dc-3e78-4733-9bfd-4ecb88a9d10f"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:32:40 crc kubenswrapper[5070]: I1213 03:32:40.384485 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9e4432dc-3e78-4733-9bfd-4ecb88a9d10f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9e4432dc-3e78-4733-9bfd-4ecb88a9d10f" (UID: "9e4432dc-3e78-4733-9bfd-4ecb88a9d10f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:32:40 crc kubenswrapper[5070]: I1213 03:32:40.440518 5070 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9e4432dc-3e78-4733-9bfd-4ecb88a9d10f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 03:32:40 crc kubenswrapper[5070]: I1213 03:32:40.440556 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mn544\" (UniqueName: \"kubernetes.io/projected/9e4432dc-3e78-4733-9bfd-4ecb88a9d10f-kube-api-access-mn544\") on node \"crc\" DevicePath \"\"" Dec 13 03:32:40 crc kubenswrapper[5070]: I1213 03:32:40.440568 5070 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9e4432dc-3e78-4733-9bfd-4ecb88a9d10f-config-data\") on node \"crc\" DevicePath \"\"" Dec 13 03:32:41 crc kubenswrapper[5070]: I1213 03:32:41.088967 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"9e4432dc-3e78-4733-9bfd-4ecb88a9d10f","Type":"ContainerDied","Data":"59418cac85b4bbe1361814f05c85f40f650c6015a7eca1fbd96bd8bfd165eadc"} Dec 13 03:32:41 crc kubenswrapper[5070]: I1213 03:32:41.089024 5070 scope.go:117] "RemoveContainer" containerID="ae46931758d5573f0f2b088f2b02e634f877291a3e5a95c0e80f3046c28818e7" Dec 13 03:32:41 crc kubenswrapper[5070]: I1213 03:32:41.089030 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 13 03:32:41 crc kubenswrapper[5070]: I1213 03:32:41.121682 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 13 03:32:41 crc kubenswrapper[5070]: I1213 03:32:41.131181 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Dec 13 03:32:41 crc kubenswrapper[5070]: I1213 03:32:41.141650 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Dec 13 03:32:41 crc kubenswrapper[5070]: E1213 03:32:41.142079 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a485ad64-a9af-43f3-863a-0ee52612df72" containerName="nova-manage" Dec 13 03:32:41 crc kubenswrapper[5070]: I1213 03:32:41.142102 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="a485ad64-a9af-43f3-863a-0ee52612df72" containerName="nova-manage" Dec 13 03:32:41 crc kubenswrapper[5070]: E1213 03:32:41.142125 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="30d804de-c917-4ff4-9576-3e8410417e0a" containerName="init" Dec 13 03:32:41 crc kubenswrapper[5070]: I1213 03:32:41.142132 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="30d804de-c917-4ff4-9576-3e8410417e0a" containerName="init" Dec 13 03:32:41 crc kubenswrapper[5070]: E1213 03:32:41.142156 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9e4432dc-3e78-4733-9bfd-4ecb88a9d10f" containerName="nova-scheduler-scheduler" Dec 13 03:32:41 crc kubenswrapper[5070]: I1213 03:32:41.142163 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="9e4432dc-3e78-4733-9bfd-4ecb88a9d10f" containerName="nova-scheduler-scheduler" Dec 13 03:32:41 crc kubenswrapper[5070]: E1213 03:32:41.142180 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="30d804de-c917-4ff4-9576-3e8410417e0a" containerName="dnsmasq-dns" Dec 13 03:32:41 crc kubenswrapper[5070]: I1213 03:32:41.142187 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="30d804de-c917-4ff4-9576-3e8410417e0a" containerName="dnsmasq-dns" Dec 13 03:32:41 crc kubenswrapper[5070]: I1213 03:32:41.142367 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="a485ad64-a9af-43f3-863a-0ee52612df72" containerName="nova-manage" Dec 13 03:32:41 crc kubenswrapper[5070]: I1213 03:32:41.142387 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="9e4432dc-3e78-4733-9bfd-4ecb88a9d10f" containerName="nova-scheduler-scheduler" Dec 13 03:32:41 crc kubenswrapper[5070]: I1213 03:32:41.142412 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="30d804de-c917-4ff4-9576-3e8410417e0a" containerName="dnsmasq-dns" Dec 13 03:32:41 crc kubenswrapper[5070]: I1213 03:32:41.143381 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 13 03:32:41 crc kubenswrapper[5070]: I1213 03:32:41.145352 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Dec 13 03:32:41 crc kubenswrapper[5070]: I1213 03:32:41.153456 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 13 03:32:41 crc kubenswrapper[5070]: I1213 03:32:41.154604 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/edbeeef8-80ad-4b02-bc7b-988cdec64a99-config-data\") pod \"nova-scheduler-0\" (UID: \"edbeeef8-80ad-4b02-bc7b-988cdec64a99\") " pod="openstack/nova-scheduler-0" Dec 13 03:32:41 crc kubenswrapper[5070]: I1213 03:32:41.154765 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/edbeeef8-80ad-4b02-bc7b-988cdec64a99-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"edbeeef8-80ad-4b02-bc7b-988cdec64a99\") " pod="openstack/nova-scheduler-0" Dec 13 03:32:41 crc kubenswrapper[5070]: I1213 03:32:41.154797 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dt6k5\" (UniqueName: \"kubernetes.io/projected/edbeeef8-80ad-4b02-bc7b-988cdec64a99-kube-api-access-dt6k5\") pod \"nova-scheduler-0\" (UID: \"edbeeef8-80ad-4b02-bc7b-988cdec64a99\") " pod="openstack/nova-scheduler-0" Dec 13 03:32:41 crc kubenswrapper[5070]: I1213 03:32:41.256696 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/edbeeef8-80ad-4b02-bc7b-988cdec64a99-config-data\") pod \"nova-scheduler-0\" (UID: \"edbeeef8-80ad-4b02-bc7b-988cdec64a99\") " pod="openstack/nova-scheduler-0" Dec 13 03:32:41 crc kubenswrapper[5070]: I1213 03:32:41.256827 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/edbeeef8-80ad-4b02-bc7b-988cdec64a99-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"edbeeef8-80ad-4b02-bc7b-988cdec64a99\") " pod="openstack/nova-scheduler-0" Dec 13 03:32:41 crc kubenswrapper[5070]: I1213 03:32:41.256864 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dt6k5\" (UniqueName: \"kubernetes.io/projected/edbeeef8-80ad-4b02-bc7b-988cdec64a99-kube-api-access-dt6k5\") pod \"nova-scheduler-0\" (UID: \"edbeeef8-80ad-4b02-bc7b-988cdec64a99\") " pod="openstack/nova-scheduler-0" Dec 13 03:32:41 crc kubenswrapper[5070]: I1213 03:32:41.260847 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/edbeeef8-80ad-4b02-bc7b-988cdec64a99-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"edbeeef8-80ad-4b02-bc7b-988cdec64a99\") " pod="openstack/nova-scheduler-0" Dec 13 03:32:41 crc kubenswrapper[5070]: I1213 03:32:41.260867 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/edbeeef8-80ad-4b02-bc7b-988cdec64a99-config-data\") pod \"nova-scheduler-0\" (UID: \"edbeeef8-80ad-4b02-bc7b-988cdec64a99\") " pod="openstack/nova-scheduler-0" Dec 13 03:32:41 crc kubenswrapper[5070]: I1213 03:32:41.277050 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dt6k5\" (UniqueName: 
\"kubernetes.io/projected/edbeeef8-80ad-4b02-bc7b-988cdec64a99-kube-api-access-dt6k5\") pod \"nova-scheduler-0\" (UID: \"edbeeef8-80ad-4b02-bc7b-988cdec64a99\") " pod="openstack/nova-scheduler-0" Dec 13 03:32:41 crc kubenswrapper[5070]: I1213 03:32:41.461040 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 13 03:32:41 crc kubenswrapper[5070]: I1213 03:32:41.473691 5070 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="c574c806-7eda-4898-a3d7-92e5a80f9950" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.176:8775/\": read tcp 10.217.0.2:39004->10.217.0.176:8775: read: connection reset by peer" Dec 13 03:32:41 crc kubenswrapper[5070]: I1213 03:32:41.473698 5070 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="c574c806-7eda-4898-a3d7-92e5a80f9950" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.176:8775/\": read tcp 10.217.0.2:38998->10.217.0.176:8775: read: connection reset by peer" Dec 13 03:32:41 crc kubenswrapper[5070]: I1213 03:32:41.737468 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 13 03:32:41 crc kubenswrapper[5070]: W1213 03:32:41.771123 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podedbeeef8_80ad_4b02_bc7b_988cdec64a99.slice/crio-4a735b23b998b0a4c023d4e845c2c6b2bf79d8223b3524a001c4e5af5138d433 WatchSource:0}: Error finding container 4a735b23b998b0a4c023d4e845c2c6b2bf79d8223b3524a001c4e5af5138d433: Status 404 returned error can't find the container with id 4a735b23b998b0a4c023d4e845c2c6b2bf79d8223b3524a001c4e5af5138d433 Dec 13 03:32:42 crc kubenswrapper[5070]: I1213 03:32:42.100756 5070 generic.go:334] "Generic (PLEG): container finished" podID="c574c806-7eda-4898-a3d7-92e5a80f9950" containerID="c51073fd26d422ecab3a1776e9a7baa263de6d8ffc5084a6968c64056239c644" exitCode=0 Dec 13 03:32:42 crc kubenswrapper[5070]: I1213 03:32:42.100836 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"c574c806-7eda-4898-a3d7-92e5a80f9950","Type":"ContainerDied","Data":"c51073fd26d422ecab3a1776e9a7baa263de6d8ffc5084a6968c64056239c644"} Dec 13 03:32:42 crc kubenswrapper[5070]: I1213 03:32:42.103932 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"edbeeef8-80ad-4b02-bc7b-988cdec64a99","Type":"ContainerStarted","Data":"ffdb92fa00667fc66400f8bd1b51a8a538821f419d72c1f9395334a7aaa19842"} Dec 13 03:32:42 crc kubenswrapper[5070]: I1213 03:32:42.104064 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"edbeeef8-80ad-4b02-bc7b-988cdec64a99","Type":"ContainerStarted","Data":"4a735b23b998b0a4c023d4e845c2c6b2bf79d8223b3524a001c4e5af5138d433"} Dec 13 03:32:42 crc kubenswrapper[5070]: I1213 03:32:42.127832 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=1.127799148 podStartE2EDuration="1.127799148s" podCreationTimestamp="2025-12-13 03:32:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 03:32:42.121433994 +0000 UTC m=+1254.357277570" watchObservedRunningTime="2025-12-13 03:32:42.127799148 +0000 UTC m=+1254.363642764" Dec 13 03:32:42 
crc kubenswrapper[5070]: I1213 03:32:42.182630 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9e4432dc-3e78-4733-9bfd-4ecb88a9d10f" path="/var/lib/kubelet/pods/9e4432dc-3e78-4733-9bfd-4ecb88a9d10f/volumes" Dec 13 03:32:42 crc kubenswrapper[5070]: I1213 03:32:42.520647 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 13 03:32:42 crc kubenswrapper[5070]: I1213 03:32:42.580632 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/c574c806-7eda-4898-a3d7-92e5a80f9950-nova-metadata-tls-certs\") pod \"c574c806-7eda-4898-a3d7-92e5a80f9950\" (UID: \"c574c806-7eda-4898-a3d7-92e5a80f9950\") " Dec 13 03:32:42 crc kubenswrapper[5070]: I1213 03:32:42.581088 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c574c806-7eda-4898-a3d7-92e5a80f9950-config-data\") pod \"c574c806-7eda-4898-a3d7-92e5a80f9950\" (UID: \"c574c806-7eda-4898-a3d7-92e5a80f9950\") " Dec 13 03:32:42 crc kubenswrapper[5070]: I1213 03:32:42.581145 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c574c806-7eda-4898-a3d7-92e5a80f9950-combined-ca-bundle\") pod \"c574c806-7eda-4898-a3d7-92e5a80f9950\" (UID: \"c574c806-7eda-4898-a3d7-92e5a80f9950\") " Dec 13 03:32:42 crc kubenswrapper[5070]: I1213 03:32:42.581282 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c574c806-7eda-4898-a3d7-92e5a80f9950-logs\") pod \"c574c806-7eda-4898-a3d7-92e5a80f9950\" (UID: \"c574c806-7eda-4898-a3d7-92e5a80f9950\") " Dec 13 03:32:42 crc kubenswrapper[5070]: I1213 03:32:42.581352 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sxmmd\" (UniqueName: \"kubernetes.io/projected/c574c806-7eda-4898-a3d7-92e5a80f9950-kube-api-access-sxmmd\") pod \"c574c806-7eda-4898-a3d7-92e5a80f9950\" (UID: \"c574c806-7eda-4898-a3d7-92e5a80f9950\") " Dec 13 03:32:42 crc kubenswrapper[5070]: I1213 03:32:42.581866 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c574c806-7eda-4898-a3d7-92e5a80f9950-logs" (OuterVolumeSpecName: "logs") pod "c574c806-7eda-4898-a3d7-92e5a80f9950" (UID: "c574c806-7eda-4898-a3d7-92e5a80f9950"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 03:32:42 crc kubenswrapper[5070]: I1213 03:32:42.594352 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c574c806-7eda-4898-a3d7-92e5a80f9950-kube-api-access-sxmmd" (OuterVolumeSpecName: "kube-api-access-sxmmd") pod "c574c806-7eda-4898-a3d7-92e5a80f9950" (UID: "c574c806-7eda-4898-a3d7-92e5a80f9950"). InnerVolumeSpecName "kube-api-access-sxmmd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:32:42 crc kubenswrapper[5070]: I1213 03:32:42.632562 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c574c806-7eda-4898-a3d7-92e5a80f9950-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c574c806-7eda-4898-a3d7-92e5a80f9950" (UID: "c574c806-7eda-4898-a3d7-92e5a80f9950"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:32:42 crc kubenswrapper[5070]: I1213 03:32:42.637647 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c574c806-7eda-4898-a3d7-92e5a80f9950-config-data" (OuterVolumeSpecName: "config-data") pod "c574c806-7eda-4898-a3d7-92e5a80f9950" (UID: "c574c806-7eda-4898-a3d7-92e5a80f9950"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:32:42 crc kubenswrapper[5070]: I1213 03:32:42.669503 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c574c806-7eda-4898-a3d7-92e5a80f9950-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "c574c806-7eda-4898-a3d7-92e5a80f9950" (UID: "c574c806-7eda-4898-a3d7-92e5a80f9950"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:32:42 crc kubenswrapper[5070]: I1213 03:32:42.684905 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sxmmd\" (UniqueName: \"kubernetes.io/projected/c574c806-7eda-4898-a3d7-92e5a80f9950-kube-api-access-sxmmd\") on node \"crc\" DevicePath \"\"" Dec 13 03:32:42 crc kubenswrapper[5070]: I1213 03:32:42.684960 5070 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/c574c806-7eda-4898-a3d7-92e5a80f9950-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 13 03:32:42 crc kubenswrapper[5070]: I1213 03:32:42.684978 5070 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c574c806-7eda-4898-a3d7-92e5a80f9950-config-data\") on node \"crc\" DevicePath \"\"" Dec 13 03:32:42 crc kubenswrapper[5070]: I1213 03:32:42.684988 5070 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c574c806-7eda-4898-a3d7-92e5a80f9950-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 03:32:42 crc kubenswrapper[5070]: I1213 03:32:42.684999 5070 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c574c806-7eda-4898-a3d7-92e5a80f9950-logs\") on node \"crc\" DevicePath \"\"" Dec 13 03:32:42 crc kubenswrapper[5070]: E1213 03:32:42.994080 5070 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1d71c72d_4599_48e7_8c31_63f6968aacb2.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1d71c72d_4599_48e7_8c31_63f6968aacb2.slice/crio-a86ed279fdad21653609fc09f712450947d9082e3611c9069cd21b6241a1fa88\": RecentStats: unable to find data in memory cache]" Dec 13 03:32:43 crc kubenswrapper[5070]: I1213 03:32:43.123039 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"c574c806-7eda-4898-a3d7-92e5a80f9950","Type":"ContainerDied","Data":"9127d612297935ce4cc289c62c2e187ee51993a428882923ec353f85b2bab2e5"} Dec 13 03:32:43 crc kubenswrapper[5070]: I1213 03:32:43.123095 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 13 03:32:43 crc kubenswrapper[5070]: I1213 03:32:43.123117 5070 scope.go:117] "RemoveContainer" containerID="c51073fd26d422ecab3a1776e9a7baa263de6d8ffc5084a6968c64056239c644" Dec 13 03:32:43 crc kubenswrapper[5070]: I1213 03:32:43.147382 5070 scope.go:117] "RemoveContainer" containerID="63e108499b04dc6fbe6906f6d963a0986409cd78dbccccdac7b83c64d0edfa9f" Dec 13 03:32:43 crc kubenswrapper[5070]: I1213 03:32:43.180575 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 13 03:32:43 crc kubenswrapper[5070]: I1213 03:32:43.187875 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Dec 13 03:32:43 crc kubenswrapper[5070]: I1213 03:32:43.205303 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 13 03:32:43 crc kubenswrapper[5070]: E1213 03:32:43.205876 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c574c806-7eda-4898-a3d7-92e5a80f9950" containerName="nova-metadata-log" Dec 13 03:32:43 crc kubenswrapper[5070]: I1213 03:32:43.205944 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="c574c806-7eda-4898-a3d7-92e5a80f9950" containerName="nova-metadata-log" Dec 13 03:32:43 crc kubenswrapper[5070]: E1213 03:32:43.206027 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c574c806-7eda-4898-a3d7-92e5a80f9950" containerName="nova-metadata-metadata" Dec 13 03:32:43 crc kubenswrapper[5070]: I1213 03:32:43.206079 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="c574c806-7eda-4898-a3d7-92e5a80f9950" containerName="nova-metadata-metadata" Dec 13 03:32:43 crc kubenswrapper[5070]: I1213 03:32:43.210846 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="c574c806-7eda-4898-a3d7-92e5a80f9950" containerName="nova-metadata-log" Dec 13 03:32:43 crc kubenswrapper[5070]: I1213 03:32:43.210912 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="c574c806-7eda-4898-a3d7-92e5a80f9950" containerName="nova-metadata-metadata" Dec 13 03:32:43 crc kubenswrapper[5070]: I1213 03:32:43.212210 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 13 03:32:43 crc kubenswrapper[5070]: I1213 03:32:43.219226 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 13 03:32:43 crc kubenswrapper[5070]: I1213 03:32:43.220041 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Dec 13 03:32:43 crc kubenswrapper[5070]: I1213 03:32:43.231528 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 13 03:32:43 crc kubenswrapper[5070]: I1213 03:32:43.396948 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a37bc3a1-ce6c-4c58-b7d6-cb9d64227a01-config-data\") pod \"nova-metadata-0\" (UID: \"a37bc3a1-ce6c-4c58-b7d6-cb9d64227a01\") " pod="openstack/nova-metadata-0" Dec 13 03:32:43 crc kubenswrapper[5070]: I1213 03:32:43.397346 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a37bc3a1-ce6c-4c58-b7d6-cb9d64227a01-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"a37bc3a1-ce6c-4c58-b7d6-cb9d64227a01\") " pod="openstack/nova-metadata-0" Dec 13 03:32:43 crc kubenswrapper[5070]: I1213 03:32:43.397610 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lszl4\" (UniqueName: \"kubernetes.io/projected/a37bc3a1-ce6c-4c58-b7d6-cb9d64227a01-kube-api-access-lszl4\") pod \"nova-metadata-0\" (UID: \"a37bc3a1-ce6c-4c58-b7d6-cb9d64227a01\") " pod="openstack/nova-metadata-0" Dec 13 03:32:43 crc kubenswrapper[5070]: I1213 03:32:43.397818 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a37bc3a1-ce6c-4c58-b7d6-cb9d64227a01-logs\") pod \"nova-metadata-0\" (UID: \"a37bc3a1-ce6c-4c58-b7d6-cb9d64227a01\") " pod="openstack/nova-metadata-0" Dec 13 03:32:43 crc kubenswrapper[5070]: I1213 03:32:43.398049 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/a37bc3a1-ce6c-4c58-b7d6-cb9d64227a01-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"a37bc3a1-ce6c-4c58-b7d6-cb9d64227a01\") " pod="openstack/nova-metadata-0" Dec 13 03:32:43 crc kubenswrapper[5070]: I1213 03:32:43.499787 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a37bc3a1-ce6c-4c58-b7d6-cb9d64227a01-logs\") pod \"nova-metadata-0\" (UID: \"a37bc3a1-ce6c-4c58-b7d6-cb9d64227a01\") " pod="openstack/nova-metadata-0" Dec 13 03:32:43 crc kubenswrapper[5070]: I1213 03:32:43.500258 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/a37bc3a1-ce6c-4c58-b7d6-cb9d64227a01-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"a37bc3a1-ce6c-4c58-b7d6-cb9d64227a01\") " pod="openstack/nova-metadata-0" Dec 13 03:32:43 crc kubenswrapper[5070]: I1213 03:32:43.500543 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a37bc3a1-ce6c-4c58-b7d6-cb9d64227a01-config-data\") pod \"nova-metadata-0\" (UID: \"a37bc3a1-ce6c-4c58-b7d6-cb9d64227a01\") " pod="openstack/nova-metadata-0" Dec 13 
03:32:43 crc kubenswrapper[5070]: I1213 03:32:43.500722 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a37bc3a1-ce6c-4c58-b7d6-cb9d64227a01-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"a37bc3a1-ce6c-4c58-b7d6-cb9d64227a01\") " pod="openstack/nova-metadata-0" Dec 13 03:32:43 crc kubenswrapper[5070]: I1213 03:32:43.500895 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lszl4\" (UniqueName: \"kubernetes.io/projected/a37bc3a1-ce6c-4c58-b7d6-cb9d64227a01-kube-api-access-lszl4\") pod \"nova-metadata-0\" (UID: \"a37bc3a1-ce6c-4c58-b7d6-cb9d64227a01\") " pod="openstack/nova-metadata-0" Dec 13 03:32:43 crc kubenswrapper[5070]: I1213 03:32:43.500391 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a37bc3a1-ce6c-4c58-b7d6-cb9d64227a01-logs\") pod \"nova-metadata-0\" (UID: \"a37bc3a1-ce6c-4c58-b7d6-cb9d64227a01\") " pod="openstack/nova-metadata-0" Dec 13 03:32:43 crc kubenswrapper[5070]: I1213 03:32:43.505091 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/a37bc3a1-ce6c-4c58-b7d6-cb9d64227a01-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"a37bc3a1-ce6c-4c58-b7d6-cb9d64227a01\") " pod="openstack/nova-metadata-0" Dec 13 03:32:43 crc kubenswrapper[5070]: I1213 03:32:43.505117 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a37bc3a1-ce6c-4c58-b7d6-cb9d64227a01-config-data\") pod \"nova-metadata-0\" (UID: \"a37bc3a1-ce6c-4c58-b7d6-cb9d64227a01\") " pod="openstack/nova-metadata-0" Dec 13 03:32:43 crc kubenswrapper[5070]: I1213 03:32:43.523477 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a37bc3a1-ce6c-4c58-b7d6-cb9d64227a01-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"a37bc3a1-ce6c-4c58-b7d6-cb9d64227a01\") " pod="openstack/nova-metadata-0" Dec 13 03:32:43 crc kubenswrapper[5070]: I1213 03:32:43.535358 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lszl4\" (UniqueName: \"kubernetes.io/projected/a37bc3a1-ce6c-4c58-b7d6-cb9d64227a01-kube-api-access-lszl4\") pod \"nova-metadata-0\" (UID: \"a37bc3a1-ce6c-4c58-b7d6-cb9d64227a01\") " pod="openstack/nova-metadata-0" Dec 13 03:32:43 crc kubenswrapper[5070]: I1213 03:32:43.829309 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 13 03:32:44 crc kubenswrapper[5070]: I1213 03:32:44.135266 5070 generic.go:334] "Generic (PLEG): container finished" podID="aab1756e-d68e-4718-b56a-59bf625e16bf" containerID="dc736f0188e92c287f63148ccfcce62053581e40bcdec8b4958dacfd72f179d9" exitCode=0 Dec 13 03:32:44 crc kubenswrapper[5070]: I1213 03:32:44.135566 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"aab1756e-d68e-4718-b56a-59bf625e16bf","Type":"ContainerDied","Data":"dc736f0188e92c287f63148ccfcce62053581e40bcdec8b4958dacfd72f179d9"} Dec 13 03:32:44 crc kubenswrapper[5070]: I1213 03:32:44.167669 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 13 03:32:44 crc kubenswrapper[5070]: I1213 03:32:44.180904 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c574c806-7eda-4898-a3d7-92e5a80f9950" path="/var/lib/kubelet/pods/c574c806-7eda-4898-a3d7-92e5a80f9950/volumes" Dec 13 03:32:44 crc kubenswrapper[5070]: I1213 03:32:44.211539 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aab1756e-d68e-4718-b56a-59bf625e16bf-combined-ca-bundle\") pod \"aab1756e-d68e-4718-b56a-59bf625e16bf\" (UID: \"aab1756e-d68e-4718-b56a-59bf625e16bf\") " Dec 13 03:32:44 crc kubenswrapper[5070]: I1213 03:32:44.211584 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/aab1756e-d68e-4718-b56a-59bf625e16bf-internal-tls-certs\") pod \"aab1756e-d68e-4718-b56a-59bf625e16bf\" (UID: \"aab1756e-d68e-4718-b56a-59bf625e16bf\") " Dec 13 03:32:44 crc kubenswrapper[5070]: I1213 03:32:44.211661 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aab1756e-d68e-4718-b56a-59bf625e16bf-config-data\") pod \"aab1756e-d68e-4718-b56a-59bf625e16bf\" (UID: \"aab1756e-d68e-4718-b56a-59bf625e16bf\") " Dec 13 03:32:44 crc kubenswrapper[5070]: I1213 03:32:44.211721 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l7rgf\" (UniqueName: \"kubernetes.io/projected/aab1756e-d68e-4718-b56a-59bf625e16bf-kube-api-access-l7rgf\") pod \"aab1756e-d68e-4718-b56a-59bf625e16bf\" (UID: \"aab1756e-d68e-4718-b56a-59bf625e16bf\") " Dec 13 03:32:44 crc kubenswrapper[5070]: I1213 03:32:44.211754 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/aab1756e-d68e-4718-b56a-59bf625e16bf-logs\") pod \"aab1756e-d68e-4718-b56a-59bf625e16bf\" (UID: \"aab1756e-d68e-4718-b56a-59bf625e16bf\") " Dec 13 03:32:44 crc kubenswrapper[5070]: I1213 03:32:44.211849 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/aab1756e-d68e-4718-b56a-59bf625e16bf-public-tls-certs\") pod \"aab1756e-d68e-4718-b56a-59bf625e16bf\" (UID: \"aab1756e-d68e-4718-b56a-59bf625e16bf\") " Dec 13 03:32:44 crc kubenswrapper[5070]: I1213 03:32:44.212288 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/aab1756e-d68e-4718-b56a-59bf625e16bf-logs" (OuterVolumeSpecName: "logs") pod "aab1756e-d68e-4718-b56a-59bf625e16bf" (UID: "aab1756e-d68e-4718-b56a-59bf625e16bf"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 03:32:44 crc kubenswrapper[5070]: I1213 03:32:44.221412 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aab1756e-d68e-4718-b56a-59bf625e16bf-kube-api-access-l7rgf" (OuterVolumeSpecName: "kube-api-access-l7rgf") pod "aab1756e-d68e-4718-b56a-59bf625e16bf" (UID: "aab1756e-d68e-4718-b56a-59bf625e16bf"). InnerVolumeSpecName "kube-api-access-l7rgf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:32:44 crc kubenswrapper[5070]: I1213 03:32:44.236314 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aab1756e-d68e-4718-b56a-59bf625e16bf-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "aab1756e-d68e-4718-b56a-59bf625e16bf" (UID: "aab1756e-d68e-4718-b56a-59bf625e16bf"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:32:44 crc kubenswrapper[5070]: I1213 03:32:44.238353 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aab1756e-d68e-4718-b56a-59bf625e16bf-config-data" (OuterVolumeSpecName: "config-data") pod "aab1756e-d68e-4718-b56a-59bf625e16bf" (UID: "aab1756e-d68e-4718-b56a-59bf625e16bf"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:32:44 crc kubenswrapper[5070]: I1213 03:32:44.257603 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aab1756e-d68e-4718-b56a-59bf625e16bf-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "aab1756e-d68e-4718-b56a-59bf625e16bf" (UID: "aab1756e-d68e-4718-b56a-59bf625e16bf"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:32:44 crc kubenswrapper[5070]: I1213 03:32:44.259350 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aab1756e-d68e-4718-b56a-59bf625e16bf-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "aab1756e-d68e-4718-b56a-59bf625e16bf" (UID: "aab1756e-d68e-4718-b56a-59bf625e16bf"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:32:44 crc kubenswrapper[5070]: I1213 03:32:44.313632 5070 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/aab1756e-d68e-4718-b56a-59bf625e16bf-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 13 03:32:44 crc kubenswrapper[5070]: I1213 03:32:44.313968 5070 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aab1756e-d68e-4718-b56a-59bf625e16bf-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 03:32:44 crc kubenswrapper[5070]: I1213 03:32:44.313994 5070 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/aab1756e-d68e-4718-b56a-59bf625e16bf-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 13 03:32:44 crc kubenswrapper[5070]: I1213 03:32:44.314005 5070 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aab1756e-d68e-4718-b56a-59bf625e16bf-config-data\") on node \"crc\" DevicePath \"\"" Dec 13 03:32:44 crc kubenswrapper[5070]: I1213 03:32:44.314017 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l7rgf\" (UniqueName: \"kubernetes.io/projected/aab1756e-d68e-4718-b56a-59bf625e16bf-kube-api-access-l7rgf\") on node \"crc\" DevicePath \"\"" Dec 13 03:32:44 crc kubenswrapper[5070]: I1213 03:32:44.314032 5070 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/aab1756e-d68e-4718-b56a-59bf625e16bf-logs\") on node \"crc\" DevicePath \"\"" Dec 13 03:32:44 crc kubenswrapper[5070]: I1213 03:32:44.353875 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openstack/nova-metadata-0"] Dec 13 03:32:44 crc kubenswrapper[5070]: W1213 03:32:44.362241 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda37bc3a1_ce6c_4c58_b7d6_cb9d64227a01.slice/crio-d187e4a19a4dc24a8764d7a542e2bc53335a7f6bb9de02d5ab66c31bad4d6ddc WatchSource:0}: Error finding container d187e4a19a4dc24a8764d7a542e2bc53335a7f6bb9de02d5ab66c31bad4d6ddc: Status 404 returned error can't find the container with id d187e4a19a4dc24a8764d7a542e2bc53335a7f6bb9de02d5ab66c31bad4d6ddc Dec 13 03:32:45 crc kubenswrapper[5070]: I1213 03:32:45.146461 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"a37bc3a1-ce6c-4c58-b7d6-cb9d64227a01","Type":"ContainerStarted","Data":"179367eba6f995eb81efd1a98d27a4e6e65ce950eaeb4d7cfa99f4fbdb833dd3"} Dec 13 03:32:45 crc kubenswrapper[5070]: I1213 03:32:45.146823 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"a37bc3a1-ce6c-4c58-b7d6-cb9d64227a01","Type":"ContainerStarted","Data":"ae0e1c063e543e4245c699b30f7bc4dc62c26bb14da7d85844d5b66de7c9a7bf"} Dec 13 03:32:45 crc kubenswrapper[5070]: I1213 03:32:45.146840 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"a37bc3a1-ce6c-4c58-b7d6-cb9d64227a01","Type":"ContainerStarted","Data":"d187e4a19a4dc24a8764d7a542e2bc53335a7f6bb9de02d5ab66c31bad4d6ddc"} Dec 13 03:32:45 crc kubenswrapper[5070]: I1213 03:32:45.148329 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"aab1756e-d68e-4718-b56a-59bf625e16bf","Type":"ContainerDied","Data":"8d663b1f8f24474b7e931b8ae4d99690e269d6cd360b7cf4544df782ad140083"} Dec 13 03:32:45 crc kubenswrapper[5070]: I1213 03:32:45.148361 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 13 03:32:45 crc kubenswrapper[5070]: I1213 03:32:45.148388 5070 scope.go:117] "RemoveContainer" containerID="dc736f0188e92c287f63148ccfcce62053581e40bcdec8b4958dacfd72f179d9" Dec 13 03:32:45 crc kubenswrapper[5070]: I1213 03:32:45.181578 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.181559244 podStartE2EDuration="2.181559244s" podCreationTimestamp="2025-12-13 03:32:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 03:32:45.172674941 +0000 UTC m=+1257.408518487" watchObservedRunningTime="2025-12-13 03:32:45.181559244 +0000 UTC m=+1257.417402790" Dec 13 03:32:45 crc kubenswrapper[5070]: I1213 03:32:45.183978 5070 scope.go:117] "RemoveContainer" containerID="1db10164433b257453c495542b42d4ef9e6580becd4440d018cde371b3c1eef8" Dec 13 03:32:45 crc kubenswrapper[5070]: I1213 03:32:45.218495 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 13 03:32:45 crc kubenswrapper[5070]: I1213 03:32:45.235571 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Dec 13 03:32:45 crc kubenswrapper[5070]: I1213 03:32:45.245517 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 13 03:32:45 crc kubenswrapper[5070]: E1213 03:32:45.246006 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aab1756e-d68e-4718-b56a-59bf625e16bf" containerName="nova-api-log" Dec 13 03:32:45 crc kubenswrapper[5070]: I1213 03:32:45.246031 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="aab1756e-d68e-4718-b56a-59bf625e16bf" containerName="nova-api-log" Dec 13 03:32:45 crc kubenswrapper[5070]: E1213 03:32:45.246050 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aab1756e-d68e-4718-b56a-59bf625e16bf" containerName="nova-api-api" Dec 13 03:32:45 crc kubenswrapper[5070]: I1213 03:32:45.246058 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="aab1756e-d68e-4718-b56a-59bf625e16bf" containerName="nova-api-api" Dec 13 03:32:45 crc kubenswrapper[5070]: I1213 03:32:45.246230 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="aab1756e-d68e-4718-b56a-59bf625e16bf" containerName="nova-api-log" Dec 13 03:32:45 crc kubenswrapper[5070]: I1213 03:32:45.246249 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="aab1756e-d68e-4718-b56a-59bf625e16bf" containerName="nova-api-api" Dec 13 03:32:45 crc kubenswrapper[5070]: I1213 03:32:45.247178 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 13 03:32:45 crc kubenswrapper[5070]: I1213 03:32:45.249493 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 13 03:32:45 crc kubenswrapper[5070]: I1213 03:32:45.249506 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Dec 13 03:32:45 crc kubenswrapper[5070]: I1213 03:32:45.250364 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Dec 13 03:32:45 crc kubenswrapper[5070]: I1213 03:32:45.268052 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 13 03:32:45 crc kubenswrapper[5070]: I1213 03:32:45.333848 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/01ff34fb-3f08-4462-9ec2-7ed2b05b114b-config-data\") pod \"nova-api-0\" (UID: \"01ff34fb-3f08-4462-9ec2-7ed2b05b114b\") " pod="openstack/nova-api-0" Dec 13 03:32:45 crc kubenswrapper[5070]: I1213 03:32:45.333885 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nrzcn\" (UniqueName: \"kubernetes.io/projected/01ff34fb-3f08-4462-9ec2-7ed2b05b114b-kube-api-access-nrzcn\") pod \"nova-api-0\" (UID: \"01ff34fb-3f08-4462-9ec2-7ed2b05b114b\") " pod="openstack/nova-api-0" Dec 13 03:32:45 crc kubenswrapper[5070]: I1213 03:32:45.333946 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/01ff34fb-3f08-4462-9ec2-7ed2b05b114b-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"01ff34fb-3f08-4462-9ec2-7ed2b05b114b\") " pod="openstack/nova-api-0" Dec 13 03:32:45 crc kubenswrapper[5070]: I1213 03:32:45.333980 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/01ff34fb-3f08-4462-9ec2-7ed2b05b114b-internal-tls-certs\") pod \"nova-api-0\" (UID: \"01ff34fb-3f08-4462-9ec2-7ed2b05b114b\") " pod="openstack/nova-api-0" Dec 13 03:32:45 crc kubenswrapper[5070]: I1213 03:32:45.334045 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/01ff34fb-3f08-4462-9ec2-7ed2b05b114b-public-tls-certs\") pod \"nova-api-0\" (UID: \"01ff34fb-3f08-4462-9ec2-7ed2b05b114b\") " pod="openstack/nova-api-0" Dec 13 03:32:45 crc kubenswrapper[5070]: I1213 03:32:45.334101 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/01ff34fb-3f08-4462-9ec2-7ed2b05b114b-logs\") pod \"nova-api-0\" (UID: \"01ff34fb-3f08-4462-9ec2-7ed2b05b114b\") " pod="openstack/nova-api-0" Dec 13 03:32:45 crc kubenswrapper[5070]: I1213 03:32:45.435836 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/01ff34fb-3f08-4462-9ec2-7ed2b05b114b-config-data\") pod \"nova-api-0\" (UID: \"01ff34fb-3f08-4462-9ec2-7ed2b05b114b\") " pod="openstack/nova-api-0" Dec 13 03:32:45 crc kubenswrapper[5070]: I1213 03:32:45.435878 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nrzcn\" (UniqueName: \"kubernetes.io/projected/01ff34fb-3f08-4462-9ec2-7ed2b05b114b-kube-api-access-nrzcn\") pod 
\"nova-api-0\" (UID: \"01ff34fb-3f08-4462-9ec2-7ed2b05b114b\") " pod="openstack/nova-api-0" Dec 13 03:32:45 crc kubenswrapper[5070]: I1213 03:32:45.435925 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/01ff34fb-3f08-4462-9ec2-7ed2b05b114b-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"01ff34fb-3f08-4462-9ec2-7ed2b05b114b\") " pod="openstack/nova-api-0" Dec 13 03:32:45 crc kubenswrapper[5070]: I1213 03:32:45.435952 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/01ff34fb-3f08-4462-9ec2-7ed2b05b114b-internal-tls-certs\") pod \"nova-api-0\" (UID: \"01ff34fb-3f08-4462-9ec2-7ed2b05b114b\") " pod="openstack/nova-api-0" Dec 13 03:32:45 crc kubenswrapper[5070]: I1213 03:32:45.436011 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/01ff34fb-3f08-4462-9ec2-7ed2b05b114b-public-tls-certs\") pod \"nova-api-0\" (UID: \"01ff34fb-3f08-4462-9ec2-7ed2b05b114b\") " pod="openstack/nova-api-0" Dec 13 03:32:45 crc kubenswrapper[5070]: I1213 03:32:45.436047 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/01ff34fb-3f08-4462-9ec2-7ed2b05b114b-logs\") pod \"nova-api-0\" (UID: \"01ff34fb-3f08-4462-9ec2-7ed2b05b114b\") " pod="openstack/nova-api-0" Dec 13 03:32:45 crc kubenswrapper[5070]: I1213 03:32:45.436403 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/01ff34fb-3f08-4462-9ec2-7ed2b05b114b-logs\") pod \"nova-api-0\" (UID: \"01ff34fb-3f08-4462-9ec2-7ed2b05b114b\") " pod="openstack/nova-api-0" Dec 13 03:32:45 crc kubenswrapper[5070]: I1213 03:32:45.440646 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/01ff34fb-3f08-4462-9ec2-7ed2b05b114b-public-tls-certs\") pod \"nova-api-0\" (UID: \"01ff34fb-3f08-4462-9ec2-7ed2b05b114b\") " pod="openstack/nova-api-0" Dec 13 03:32:45 crc kubenswrapper[5070]: I1213 03:32:45.440765 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/01ff34fb-3f08-4462-9ec2-7ed2b05b114b-internal-tls-certs\") pod \"nova-api-0\" (UID: \"01ff34fb-3f08-4462-9ec2-7ed2b05b114b\") " pod="openstack/nova-api-0" Dec 13 03:32:45 crc kubenswrapper[5070]: I1213 03:32:45.441064 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/01ff34fb-3f08-4462-9ec2-7ed2b05b114b-config-data\") pod \"nova-api-0\" (UID: \"01ff34fb-3f08-4462-9ec2-7ed2b05b114b\") " pod="openstack/nova-api-0" Dec 13 03:32:45 crc kubenswrapper[5070]: I1213 03:32:45.444266 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/01ff34fb-3f08-4462-9ec2-7ed2b05b114b-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"01ff34fb-3f08-4462-9ec2-7ed2b05b114b\") " pod="openstack/nova-api-0" Dec 13 03:32:45 crc kubenswrapper[5070]: I1213 03:32:45.481015 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nrzcn\" (UniqueName: \"kubernetes.io/projected/01ff34fb-3f08-4462-9ec2-7ed2b05b114b-kube-api-access-nrzcn\") pod \"nova-api-0\" (UID: \"01ff34fb-3f08-4462-9ec2-7ed2b05b114b\") " 
pod="openstack/nova-api-0" Dec 13 03:32:45 crc kubenswrapper[5070]: I1213 03:32:45.598205 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 13 03:32:46 crc kubenswrapper[5070]: W1213 03:32:46.085328 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod01ff34fb_3f08_4462_9ec2_7ed2b05b114b.slice/crio-f647e5af8e819329127c6905200b27d9234adf937a2689a0d498bb7d490c6684 WatchSource:0}: Error finding container f647e5af8e819329127c6905200b27d9234adf937a2689a0d498bb7d490c6684: Status 404 returned error can't find the container with id f647e5af8e819329127c6905200b27d9234adf937a2689a0d498bb7d490c6684 Dec 13 03:32:46 crc kubenswrapper[5070]: I1213 03:32:46.091120 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 13 03:32:46 crc kubenswrapper[5070]: I1213 03:32:46.162243 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"01ff34fb-3f08-4462-9ec2-7ed2b05b114b","Type":"ContainerStarted","Data":"f647e5af8e819329127c6905200b27d9234adf937a2689a0d498bb7d490c6684"} Dec 13 03:32:46 crc kubenswrapper[5070]: I1213 03:32:46.180468 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aab1756e-d68e-4718-b56a-59bf625e16bf" path="/var/lib/kubelet/pods/aab1756e-d68e-4718-b56a-59bf625e16bf/volumes" Dec 13 03:32:46 crc kubenswrapper[5070]: I1213 03:32:46.462026 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Dec 13 03:32:47 crc kubenswrapper[5070]: I1213 03:32:47.171773 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"01ff34fb-3f08-4462-9ec2-7ed2b05b114b","Type":"ContainerStarted","Data":"77a44d22e5ca6d087ebedd4c468fb1abae8c1db24cac91c7718c8c6addc3873a"} Dec 13 03:32:47 crc kubenswrapper[5070]: I1213 03:32:47.171821 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"01ff34fb-3f08-4462-9ec2-7ed2b05b114b","Type":"ContainerStarted","Data":"c595b3bf30ac3ba703b1b0fe5f005f7d69b867da08dda619f005451c8ad531c3"} Dec 13 03:32:47 crc kubenswrapper[5070]: I1213 03:32:47.192876 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.192857363 podStartE2EDuration="2.192857363s" podCreationTimestamp="2025-12-13 03:32:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 03:32:47.190133308 +0000 UTC m=+1259.425976854" watchObservedRunningTime="2025-12-13 03:32:47.192857363 +0000 UTC m=+1259.428700909" Dec 13 03:32:48 crc kubenswrapper[5070]: E1213 03:32:48.201166 5070 fsHandler.go:119] failed to collect filesystem stats - rootDiskErr: could not stat "/var/lib/containers/storage/overlay/8ed858cd0987274e0120a6972ca40b8af9e426f777e70184e2dfbe5f2b148498/diff" to get inode usage: stat /var/lib/containers/storage/overlay/8ed858cd0987274e0120a6972ca40b8af9e426f777e70184e2dfbe5f2b148498/diff: no such file or directory, extraDiskErr: Dec 13 03:32:48 crc kubenswrapper[5070]: I1213 03:32:48.829689 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 13 03:32:48 crc kubenswrapper[5070]: I1213 03:32:48.829747 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 13 03:32:51 crc kubenswrapper[5070]: I1213 
03:32:51.462276 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Dec 13 03:32:51 crc kubenswrapper[5070]: I1213 03:32:51.500629 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Dec 13 03:32:51 crc kubenswrapper[5070]: I1213 03:32:51.942576 5070 patch_prober.go:28] interesting pod/machine-config-daemon-9l4rb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 13 03:32:51 crc kubenswrapper[5070]: I1213 03:32:51.942626 5070 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 13 03:32:52 crc kubenswrapper[5070]: I1213 03:32:52.237767 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Dec 13 03:32:53 crc kubenswrapper[5070]: I1213 03:32:53.830477 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 13 03:32:53 crc kubenswrapper[5070]: I1213 03:32:53.830981 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 13 03:32:54 crc kubenswrapper[5070]: I1213 03:32:54.842587 5070 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="a37bc3a1-ce6c-4c58-b7d6-cb9d64227a01" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.187:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 13 03:32:54 crc kubenswrapper[5070]: I1213 03:32:54.842696 5070 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="a37bc3a1-ce6c-4c58-b7d6-cb9d64227a01" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.187:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 13 03:32:55 crc kubenswrapper[5070]: I1213 03:32:55.599766 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 13 03:32:55 crc kubenswrapper[5070]: I1213 03:32:55.600991 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 13 03:32:56 crc kubenswrapper[5070]: I1213 03:32:56.514330 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-gs2wl"] Dec 13 03:32:56 crc kubenswrapper[5070]: I1213 03:32:56.517657 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-gs2wl" Dec 13 03:32:56 crc kubenswrapper[5070]: I1213 03:32:56.532839 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-gs2wl"] Dec 13 03:32:56 crc kubenswrapper[5070]: I1213 03:32:56.588594 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Dec 13 03:32:56 crc kubenswrapper[5070]: I1213 03:32:56.613420 5070 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="01ff34fb-3f08-4462-9ec2-7ed2b05b114b" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.188:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 13 03:32:56 crc kubenswrapper[5070]: I1213 03:32:56.613714 5070 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="01ff34fb-3f08-4462-9ec2-7ed2b05b114b" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.188:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 13 03:32:56 crc kubenswrapper[5070]: I1213 03:32:56.653846 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/547c2dbf-3224-4198-b099-e445f402a373-utilities\") pod \"redhat-operators-gs2wl\" (UID: \"547c2dbf-3224-4198-b099-e445f402a373\") " pod="openshift-marketplace/redhat-operators-gs2wl" Dec 13 03:32:56 crc kubenswrapper[5070]: I1213 03:32:56.654336 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/547c2dbf-3224-4198-b099-e445f402a373-catalog-content\") pod \"redhat-operators-gs2wl\" (UID: \"547c2dbf-3224-4198-b099-e445f402a373\") " pod="openshift-marketplace/redhat-operators-gs2wl" Dec 13 03:32:56 crc kubenswrapper[5070]: I1213 03:32:56.654383 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p9fbc\" (UniqueName: \"kubernetes.io/projected/547c2dbf-3224-4198-b099-e445f402a373-kube-api-access-p9fbc\") pod \"redhat-operators-gs2wl\" (UID: \"547c2dbf-3224-4198-b099-e445f402a373\") " pod="openshift-marketplace/redhat-operators-gs2wl" Dec 13 03:32:56 crc kubenswrapper[5070]: I1213 03:32:56.756058 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/547c2dbf-3224-4198-b099-e445f402a373-utilities\") pod \"redhat-operators-gs2wl\" (UID: \"547c2dbf-3224-4198-b099-e445f402a373\") " pod="openshift-marketplace/redhat-operators-gs2wl" Dec 13 03:32:56 crc kubenswrapper[5070]: I1213 03:32:56.756210 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/547c2dbf-3224-4198-b099-e445f402a373-catalog-content\") pod \"redhat-operators-gs2wl\" (UID: \"547c2dbf-3224-4198-b099-e445f402a373\") " pod="openshift-marketplace/redhat-operators-gs2wl" Dec 13 03:32:56 crc kubenswrapper[5070]: I1213 03:32:56.756264 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p9fbc\" (UniqueName: \"kubernetes.io/projected/547c2dbf-3224-4198-b099-e445f402a373-kube-api-access-p9fbc\") pod \"redhat-operators-gs2wl\" (UID: \"547c2dbf-3224-4198-b099-e445f402a373\") " pod="openshift-marketplace/redhat-operators-gs2wl" Dec 13 
03:32:56 crc kubenswrapper[5070]: I1213 03:32:56.756756 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/547c2dbf-3224-4198-b099-e445f402a373-utilities\") pod \"redhat-operators-gs2wl\" (UID: \"547c2dbf-3224-4198-b099-e445f402a373\") " pod="openshift-marketplace/redhat-operators-gs2wl" Dec 13 03:32:56 crc kubenswrapper[5070]: I1213 03:32:56.757032 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/547c2dbf-3224-4198-b099-e445f402a373-catalog-content\") pod \"redhat-operators-gs2wl\" (UID: \"547c2dbf-3224-4198-b099-e445f402a373\") " pod="openshift-marketplace/redhat-operators-gs2wl" Dec 13 03:32:56 crc kubenswrapper[5070]: I1213 03:32:56.793714 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p9fbc\" (UniqueName: \"kubernetes.io/projected/547c2dbf-3224-4198-b099-e445f402a373-kube-api-access-p9fbc\") pod \"redhat-operators-gs2wl\" (UID: \"547c2dbf-3224-4198-b099-e445f402a373\") " pod="openshift-marketplace/redhat-operators-gs2wl" Dec 13 03:32:56 crc kubenswrapper[5070]: I1213 03:32:56.849085 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-gs2wl" Dec 13 03:32:57 crc kubenswrapper[5070]: I1213 03:32:57.332915 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-gs2wl"] Dec 13 03:32:58 crc kubenswrapper[5070]: I1213 03:32:58.286483 5070 generic.go:334] "Generic (PLEG): container finished" podID="547c2dbf-3224-4198-b099-e445f402a373" containerID="f4572e3f91b097e58756c04d237958a1eb596fc532e2378309992f0e2ca69af9" exitCode=0 Dec 13 03:32:58 crc kubenswrapper[5070]: I1213 03:32:58.286611 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gs2wl" event={"ID":"547c2dbf-3224-4198-b099-e445f402a373","Type":"ContainerDied","Data":"f4572e3f91b097e58756c04d237958a1eb596fc532e2378309992f0e2ca69af9"} Dec 13 03:32:58 crc kubenswrapper[5070]: I1213 03:32:58.289983 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gs2wl" event={"ID":"547c2dbf-3224-4198-b099-e445f402a373","Type":"ContainerStarted","Data":"cd4cb5404b7ffdd5ed0cddadc0c3a01eba500e1cd524334339e9348d352f6e0d"} Dec 13 03:32:59 crc kubenswrapper[5070]: I1213 03:32:59.300640 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gs2wl" event={"ID":"547c2dbf-3224-4198-b099-e445f402a373","Type":"ContainerStarted","Data":"18da68c176e75ff62a7ff4d7bf03046a1c13f81d3020ec2e50ad23069b40f709"} Dec 13 03:33:01 crc kubenswrapper[5070]: I1213 03:33:01.321011 5070 generic.go:334] "Generic (PLEG): container finished" podID="547c2dbf-3224-4198-b099-e445f402a373" containerID="18da68c176e75ff62a7ff4d7bf03046a1c13f81d3020ec2e50ad23069b40f709" exitCode=0 Dec 13 03:33:01 crc kubenswrapper[5070]: I1213 03:33:01.321080 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gs2wl" event={"ID":"547c2dbf-3224-4198-b099-e445f402a373","Type":"ContainerDied","Data":"18da68c176e75ff62a7ff4d7bf03046a1c13f81d3020ec2e50ad23069b40f709"} Dec 13 03:33:02 crc kubenswrapper[5070]: I1213 03:33:02.340476 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gs2wl" 
event={"ID":"547c2dbf-3224-4198-b099-e445f402a373","Type":"ContainerStarted","Data":"518cb06c8ab3732e4c6c05f5835d93f53baeca25ea07c63a39974da8c0a72607"} Dec 13 03:33:02 crc kubenswrapper[5070]: I1213 03:33:02.386291 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-gs2wl" podStartSLOduration=2.794275748 podStartE2EDuration="6.386257728s" podCreationTimestamp="2025-12-13 03:32:56 +0000 UTC" firstStartedPulling="2025-12-13 03:32:58.289157409 +0000 UTC m=+1270.525000955" lastFinishedPulling="2025-12-13 03:33:01.881139389 +0000 UTC m=+1274.116982935" observedRunningTime="2025-12-13 03:33:02.36115516 +0000 UTC m=+1274.596998726" watchObservedRunningTime="2025-12-13 03:33:02.386257728 +0000 UTC m=+1274.622101284" Dec 13 03:33:03 crc kubenswrapper[5070]: I1213 03:33:03.836055 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 13 03:33:03 crc kubenswrapper[5070]: I1213 03:33:03.836403 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 13 03:33:03 crc kubenswrapper[5070]: I1213 03:33:03.844876 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 13 03:33:03 crc kubenswrapper[5070]: I1213 03:33:03.846774 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 13 03:33:05 crc kubenswrapper[5070]: I1213 03:33:05.605870 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 13 03:33:05 crc kubenswrapper[5070]: I1213 03:33:05.606562 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 13 03:33:05 crc kubenswrapper[5070]: I1213 03:33:05.608904 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 13 03:33:05 crc kubenswrapper[5070]: I1213 03:33:05.617107 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 13 03:33:06 crc kubenswrapper[5070]: I1213 03:33:06.379014 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 13 03:33:06 crc kubenswrapper[5070]: I1213 03:33:06.386466 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 13 03:33:06 crc kubenswrapper[5070]: I1213 03:33:06.849670 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-gs2wl" Dec 13 03:33:06 crc kubenswrapper[5070]: I1213 03:33:06.850096 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-gs2wl" Dec 13 03:33:07 crc kubenswrapper[5070]: I1213 03:33:07.913852 5070 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-gs2wl" podUID="547c2dbf-3224-4198-b099-e445f402a373" containerName="registry-server" probeResult="failure" output=< Dec 13 03:33:07 crc kubenswrapper[5070]: timeout: failed to connect service ":50051" within 1s Dec 13 03:33:07 crc kubenswrapper[5070]: > Dec 13 03:33:15 crc kubenswrapper[5070]: I1213 03:33:15.216818 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 13 03:33:16 crc kubenswrapper[5070]: I1213 03:33:16.150950 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 13 03:33:16 crc 
kubenswrapper[5070]: I1213 03:33:16.926000 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-gs2wl" Dec 13 03:33:16 crc kubenswrapper[5070]: I1213 03:33:16.988088 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-gs2wl" Dec 13 03:33:17 crc kubenswrapper[5070]: I1213 03:33:17.170798 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-gs2wl"] Dec 13 03:33:18 crc kubenswrapper[5070]: I1213 03:33:18.511572 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-gs2wl" podUID="547c2dbf-3224-4198-b099-e445f402a373" containerName="registry-server" containerID="cri-o://518cb06c8ab3732e4c6c05f5835d93f53baeca25ea07c63a39974da8c0a72607" gracePeriod=2 Dec 13 03:33:18 crc kubenswrapper[5070]: I1213 03:33:18.989659 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-gs2wl" Dec 13 03:33:19 crc kubenswrapper[5070]: I1213 03:33:19.175471 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/547c2dbf-3224-4198-b099-e445f402a373-catalog-content\") pod \"547c2dbf-3224-4198-b099-e445f402a373\" (UID: \"547c2dbf-3224-4198-b099-e445f402a373\") " Dec 13 03:33:19 crc kubenswrapper[5070]: I1213 03:33:19.175528 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/547c2dbf-3224-4198-b099-e445f402a373-utilities\") pod \"547c2dbf-3224-4198-b099-e445f402a373\" (UID: \"547c2dbf-3224-4198-b099-e445f402a373\") " Dec 13 03:33:19 crc kubenswrapper[5070]: I1213 03:33:19.175788 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p9fbc\" (UniqueName: \"kubernetes.io/projected/547c2dbf-3224-4198-b099-e445f402a373-kube-api-access-p9fbc\") pod \"547c2dbf-3224-4198-b099-e445f402a373\" (UID: \"547c2dbf-3224-4198-b099-e445f402a373\") " Dec 13 03:33:19 crc kubenswrapper[5070]: I1213 03:33:19.176561 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/547c2dbf-3224-4198-b099-e445f402a373-utilities" (OuterVolumeSpecName: "utilities") pod "547c2dbf-3224-4198-b099-e445f402a373" (UID: "547c2dbf-3224-4198-b099-e445f402a373"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 03:33:19 crc kubenswrapper[5070]: I1213 03:33:19.185792 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/547c2dbf-3224-4198-b099-e445f402a373-kube-api-access-p9fbc" (OuterVolumeSpecName: "kube-api-access-p9fbc") pod "547c2dbf-3224-4198-b099-e445f402a373" (UID: "547c2dbf-3224-4198-b099-e445f402a373"). InnerVolumeSpecName "kube-api-access-p9fbc". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:33:19 crc kubenswrapper[5070]: I1213 03:33:19.280325 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p9fbc\" (UniqueName: \"kubernetes.io/projected/547c2dbf-3224-4198-b099-e445f402a373-kube-api-access-p9fbc\") on node \"crc\" DevicePath \"\"" Dec 13 03:33:19 crc kubenswrapper[5070]: I1213 03:33:19.280648 5070 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/547c2dbf-3224-4198-b099-e445f402a373-utilities\") on node \"crc\" DevicePath \"\"" Dec 13 03:33:19 crc kubenswrapper[5070]: I1213 03:33:19.309075 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/547c2dbf-3224-4198-b099-e445f402a373-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "547c2dbf-3224-4198-b099-e445f402a373" (UID: "547c2dbf-3224-4198-b099-e445f402a373"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 03:33:19 crc kubenswrapper[5070]: I1213 03:33:19.382392 5070 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/547c2dbf-3224-4198-b099-e445f402a373-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 13 03:33:19 crc kubenswrapper[5070]: I1213 03:33:19.519964 5070 generic.go:334] "Generic (PLEG): container finished" podID="547c2dbf-3224-4198-b099-e445f402a373" containerID="518cb06c8ab3732e4c6c05f5835d93f53baeca25ea07c63a39974da8c0a72607" exitCode=0 Dec 13 03:33:19 crc kubenswrapper[5070]: I1213 03:33:19.520012 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gs2wl" event={"ID":"547c2dbf-3224-4198-b099-e445f402a373","Type":"ContainerDied","Data":"518cb06c8ab3732e4c6c05f5835d93f53baeca25ea07c63a39974da8c0a72607"} Dec 13 03:33:19 crc kubenswrapper[5070]: I1213 03:33:19.520048 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gs2wl" event={"ID":"547c2dbf-3224-4198-b099-e445f402a373","Type":"ContainerDied","Data":"cd4cb5404b7ffdd5ed0cddadc0c3a01eba500e1cd524334339e9348d352f6e0d"} Dec 13 03:33:19 crc kubenswrapper[5070]: I1213 03:33:19.520069 5070 scope.go:117] "RemoveContainer" containerID="518cb06c8ab3732e4c6c05f5835d93f53baeca25ea07c63a39974da8c0a72607" Dec 13 03:33:19 crc kubenswrapper[5070]: I1213 03:33:19.520075 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-gs2wl" Dec 13 03:33:19 crc kubenswrapper[5070]: I1213 03:33:19.552771 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-gs2wl"] Dec 13 03:33:19 crc kubenswrapper[5070]: I1213 03:33:19.555387 5070 scope.go:117] "RemoveContainer" containerID="18da68c176e75ff62a7ff4d7bf03046a1c13f81d3020ec2e50ad23069b40f709" Dec 13 03:33:19 crc kubenswrapper[5070]: I1213 03:33:19.559910 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-gs2wl"] Dec 13 03:33:19 crc kubenswrapper[5070]: I1213 03:33:19.583094 5070 scope.go:117] "RemoveContainer" containerID="f4572e3f91b097e58756c04d237958a1eb596fc532e2378309992f0e2ca69af9" Dec 13 03:33:19 crc kubenswrapper[5070]: I1213 03:33:19.617099 5070 scope.go:117] "RemoveContainer" containerID="518cb06c8ab3732e4c6c05f5835d93f53baeca25ea07c63a39974da8c0a72607" Dec 13 03:33:19 crc kubenswrapper[5070]: E1213 03:33:19.617527 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"518cb06c8ab3732e4c6c05f5835d93f53baeca25ea07c63a39974da8c0a72607\": container with ID starting with 518cb06c8ab3732e4c6c05f5835d93f53baeca25ea07c63a39974da8c0a72607 not found: ID does not exist" containerID="518cb06c8ab3732e4c6c05f5835d93f53baeca25ea07c63a39974da8c0a72607" Dec 13 03:33:19 crc kubenswrapper[5070]: I1213 03:33:19.617568 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"518cb06c8ab3732e4c6c05f5835d93f53baeca25ea07c63a39974da8c0a72607"} err="failed to get container status \"518cb06c8ab3732e4c6c05f5835d93f53baeca25ea07c63a39974da8c0a72607\": rpc error: code = NotFound desc = could not find container \"518cb06c8ab3732e4c6c05f5835d93f53baeca25ea07c63a39974da8c0a72607\": container with ID starting with 518cb06c8ab3732e4c6c05f5835d93f53baeca25ea07c63a39974da8c0a72607 not found: ID does not exist" Dec 13 03:33:19 crc kubenswrapper[5070]: I1213 03:33:19.617594 5070 scope.go:117] "RemoveContainer" containerID="18da68c176e75ff62a7ff4d7bf03046a1c13f81d3020ec2e50ad23069b40f709" Dec 13 03:33:19 crc kubenswrapper[5070]: E1213 03:33:19.618033 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"18da68c176e75ff62a7ff4d7bf03046a1c13f81d3020ec2e50ad23069b40f709\": container with ID starting with 18da68c176e75ff62a7ff4d7bf03046a1c13f81d3020ec2e50ad23069b40f709 not found: ID does not exist" containerID="18da68c176e75ff62a7ff4d7bf03046a1c13f81d3020ec2e50ad23069b40f709" Dec 13 03:33:19 crc kubenswrapper[5070]: I1213 03:33:19.618079 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"18da68c176e75ff62a7ff4d7bf03046a1c13f81d3020ec2e50ad23069b40f709"} err="failed to get container status \"18da68c176e75ff62a7ff4d7bf03046a1c13f81d3020ec2e50ad23069b40f709\": rpc error: code = NotFound desc = could not find container \"18da68c176e75ff62a7ff4d7bf03046a1c13f81d3020ec2e50ad23069b40f709\": container with ID starting with 18da68c176e75ff62a7ff4d7bf03046a1c13f81d3020ec2e50ad23069b40f709 not found: ID does not exist" Dec 13 03:33:19 crc kubenswrapper[5070]: I1213 03:33:19.618107 5070 scope.go:117] "RemoveContainer" containerID="f4572e3f91b097e58756c04d237958a1eb596fc532e2378309992f0e2ca69af9" Dec 13 03:33:19 crc kubenswrapper[5070]: E1213 03:33:19.618376 5070 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"f4572e3f91b097e58756c04d237958a1eb596fc532e2378309992f0e2ca69af9\": container with ID starting with f4572e3f91b097e58756c04d237958a1eb596fc532e2378309992f0e2ca69af9 not found: ID does not exist" containerID="f4572e3f91b097e58756c04d237958a1eb596fc532e2378309992f0e2ca69af9" Dec 13 03:33:19 crc kubenswrapper[5070]: I1213 03:33:19.618401 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f4572e3f91b097e58756c04d237958a1eb596fc532e2378309992f0e2ca69af9"} err="failed to get container status \"f4572e3f91b097e58756c04d237958a1eb596fc532e2378309992f0e2ca69af9\": rpc error: code = NotFound desc = could not find container \"f4572e3f91b097e58756c04d237958a1eb596fc532e2378309992f0e2ca69af9\": container with ID starting with f4572e3f91b097e58756c04d237958a1eb596fc532e2378309992f0e2ca69af9 not found: ID does not exist" Dec 13 03:33:20 crc kubenswrapper[5070]: I1213 03:33:20.145796 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-server-0" podUID="9dd13bd9-bfbd-4f80-b334-d8b959a6187d" containerName="rabbitmq" containerID="cri-o://33003ab2fb9eb934abf617d765789be5f82404c9f0f62b664f7771d0588edc3c" gracePeriod=604796 Dec 13 03:33:20 crc kubenswrapper[5070]: I1213 03:33:20.177520 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="547c2dbf-3224-4198-b099-e445f402a373" path="/var/lib/kubelet/pods/547c2dbf-3224-4198-b099-e445f402a373/volumes" Dec 13 03:33:20 crc kubenswrapper[5070]: I1213 03:33:20.606540 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-cell1-server-0" podUID="d669856e-7406-451a-825e-9de1fc76f8b2" containerName="rabbitmq" containerID="cri-o://ea3f2b7f506ec15d77b16759183c4887b690d74931302844a22ffc6494accc26" gracePeriod=604796 Dec 13 03:33:21 crc kubenswrapper[5070]: I1213 03:33:21.943264 5070 patch_prober.go:28] interesting pod/machine-config-daemon-9l4rb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 13 03:33:21 crc kubenswrapper[5070]: I1213 03:33:21.944308 5070 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 13 03:33:21 crc kubenswrapper[5070]: I1213 03:33:21.944481 5070 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" Dec 13 03:33:21 crc kubenswrapper[5070]: I1213 03:33:21.945535 5070 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"0e606491cad6e3bd7c4c4b366efe48c2688d128966fcc1905da9fc58a2f148b4"} pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 13 03:33:21 crc kubenswrapper[5070]: I1213 03:33:21.945719 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" 
containerName="machine-config-daemon" containerID="cri-o://0e606491cad6e3bd7c4c4b366efe48c2688d128966fcc1905da9fc58a2f148b4" gracePeriod=600 Dec 13 03:33:22 crc kubenswrapper[5070]: I1213 03:33:22.555885 5070 generic.go:334] "Generic (PLEG): container finished" podID="a2e447c7-5901-414f-af96-69441d4750db" containerID="0e606491cad6e3bd7c4c4b366efe48c2688d128966fcc1905da9fc58a2f148b4" exitCode=0 Dec 13 03:33:22 crc kubenswrapper[5070]: I1213 03:33:22.555935 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" event={"ID":"a2e447c7-5901-414f-af96-69441d4750db","Type":"ContainerDied","Data":"0e606491cad6e3bd7c4c4b366efe48c2688d128966fcc1905da9fc58a2f148b4"} Dec 13 03:33:22 crc kubenswrapper[5070]: I1213 03:33:22.556283 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" event={"ID":"a2e447c7-5901-414f-af96-69441d4750db","Type":"ContainerStarted","Data":"cca15ae9a649254597cf251cc63ec7ab70879a87c44cea845775d4020af3f28d"} Dec 13 03:33:22 crc kubenswrapper[5070]: I1213 03:33:22.556301 5070 scope.go:117] "RemoveContainer" containerID="23a1c6bfa0fe9bf90a6fdead1bc43aade8cb45302f3b76d55b4d7f69ae3c4750" Dec 13 03:33:26 crc kubenswrapper[5070]: I1213 03:33:26.186324 5070 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-0" podUID="9dd13bd9-bfbd-4f80-b334-d8b959a6187d" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.97:5671: connect: connection refused" Dec 13 03:33:26 crc kubenswrapper[5070]: I1213 03:33:26.461620 5070 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-cell1-server-0" podUID="d669856e-7406-451a-825e-9de1fc76f8b2" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.98:5671: connect: connection refused" Dec 13 03:33:26 crc kubenswrapper[5070]: I1213 03:33:26.662384 5070 generic.go:334] "Generic (PLEG): container finished" podID="9dd13bd9-bfbd-4f80-b334-d8b959a6187d" containerID="33003ab2fb9eb934abf617d765789be5f82404c9f0f62b664f7771d0588edc3c" exitCode=0 Dec 13 03:33:26 crc kubenswrapper[5070]: I1213 03:33:26.662503 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"9dd13bd9-bfbd-4f80-b334-d8b959a6187d","Type":"ContainerDied","Data":"33003ab2fb9eb934abf617d765789be5f82404c9f0f62b664f7771d0588edc3c"} Dec 13 03:33:26 crc kubenswrapper[5070]: I1213 03:33:26.662777 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"9dd13bd9-bfbd-4f80-b334-d8b959a6187d","Type":"ContainerDied","Data":"ef6ffc167256dc2ba02949de0fea32f46eb3d6e61e4d9d993516512c1fb024a3"} Dec 13 03:33:26 crc kubenswrapper[5070]: I1213 03:33:26.662794 5070 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ef6ffc167256dc2ba02949de0fea32f46eb3d6e61e4d9d993516512c1fb024a3" Dec 13 03:33:26 crc kubenswrapper[5070]: I1213 03:33:26.719273 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 13 03:33:26 crc kubenswrapper[5070]: I1213 03:33:26.880932 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/9dd13bd9-bfbd-4f80-b334-d8b959a6187d-rabbitmq-tls\") pod \"9dd13bd9-bfbd-4f80-b334-d8b959a6187d\" (UID: \"9dd13bd9-bfbd-4f80-b334-d8b959a6187d\") " Dec 13 03:33:26 crc kubenswrapper[5070]: I1213 03:33:26.880984 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/9dd13bd9-bfbd-4f80-b334-d8b959a6187d-pod-info\") pod \"9dd13bd9-bfbd-4f80-b334-d8b959a6187d\" (UID: \"9dd13bd9-bfbd-4f80-b334-d8b959a6187d\") " Dec 13 03:33:26 crc kubenswrapper[5070]: I1213 03:33:26.881032 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wlsq6\" (UniqueName: \"kubernetes.io/projected/9dd13bd9-bfbd-4f80-b334-d8b959a6187d-kube-api-access-wlsq6\") pod \"9dd13bd9-bfbd-4f80-b334-d8b959a6187d\" (UID: \"9dd13bd9-bfbd-4f80-b334-d8b959a6187d\") " Dec 13 03:33:26 crc kubenswrapper[5070]: I1213 03:33:26.881095 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/9dd13bd9-bfbd-4f80-b334-d8b959a6187d-rabbitmq-confd\") pod \"9dd13bd9-bfbd-4f80-b334-d8b959a6187d\" (UID: \"9dd13bd9-bfbd-4f80-b334-d8b959a6187d\") " Dec 13 03:33:26 crc kubenswrapper[5070]: I1213 03:33:26.881161 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/9dd13bd9-bfbd-4f80-b334-d8b959a6187d-rabbitmq-plugins\") pod \"9dd13bd9-bfbd-4f80-b334-d8b959a6187d\" (UID: \"9dd13bd9-bfbd-4f80-b334-d8b959a6187d\") " Dec 13 03:33:26 crc kubenswrapper[5070]: I1213 03:33:26.881222 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"9dd13bd9-bfbd-4f80-b334-d8b959a6187d\" (UID: \"9dd13bd9-bfbd-4f80-b334-d8b959a6187d\") " Dec 13 03:33:26 crc kubenswrapper[5070]: I1213 03:33:26.881273 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/9dd13bd9-bfbd-4f80-b334-d8b959a6187d-erlang-cookie-secret\") pod \"9dd13bd9-bfbd-4f80-b334-d8b959a6187d\" (UID: \"9dd13bd9-bfbd-4f80-b334-d8b959a6187d\") " Dec 13 03:33:26 crc kubenswrapper[5070]: I1213 03:33:26.881310 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/9dd13bd9-bfbd-4f80-b334-d8b959a6187d-rabbitmq-erlang-cookie\") pod \"9dd13bd9-bfbd-4f80-b334-d8b959a6187d\" (UID: \"9dd13bd9-bfbd-4f80-b334-d8b959a6187d\") " Dec 13 03:33:26 crc kubenswrapper[5070]: I1213 03:33:26.881343 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/9dd13bd9-bfbd-4f80-b334-d8b959a6187d-config-data\") pod \"9dd13bd9-bfbd-4f80-b334-d8b959a6187d\" (UID: \"9dd13bd9-bfbd-4f80-b334-d8b959a6187d\") " Dec 13 03:33:26 crc kubenswrapper[5070]: I1213 03:33:26.881390 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/9dd13bd9-bfbd-4f80-b334-d8b959a6187d-server-conf\") pod \"9dd13bd9-bfbd-4f80-b334-d8b959a6187d\" (UID: 
\"9dd13bd9-bfbd-4f80-b334-d8b959a6187d\") " Dec 13 03:33:26 crc kubenswrapper[5070]: I1213 03:33:26.881406 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/9dd13bd9-bfbd-4f80-b334-d8b959a6187d-plugins-conf\") pod \"9dd13bd9-bfbd-4f80-b334-d8b959a6187d\" (UID: \"9dd13bd9-bfbd-4f80-b334-d8b959a6187d\") " Dec 13 03:33:26 crc kubenswrapper[5070]: I1213 03:33:26.882311 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9dd13bd9-bfbd-4f80-b334-d8b959a6187d-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "9dd13bd9-bfbd-4f80-b334-d8b959a6187d" (UID: "9dd13bd9-bfbd-4f80-b334-d8b959a6187d"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:33:26 crc kubenswrapper[5070]: I1213 03:33:26.882710 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9dd13bd9-bfbd-4f80-b334-d8b959a6187d-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "9dd13bd9-bfbd-4f80-b334-d8b959a6187d" (UID: "9dd13bd9-bfbd-4f80-b334-d8b959a6187d"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 03:33:26 crc kubenswrapper[5070]: I1213 03:33:26.883964 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9dd13bd9-bfbd-4f80-b334-d8b959a6187d-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "9dd13bd9-bfbd-4f80-b334-d8b959a6187d" (UID: "9dd13bd9-bfbd-4f80-b334-d8b959a6187d"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 03:33:26 crc kubenswrapper[5070]: I1213 03:33:26.887818 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9dd13bd9-bfbd-4f80-b334-d8b959a6187d-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "9dd13bd9-bfbd-4f80-b334-d8b959a6187d" (UID: "9dd13bd9-bfbd-4f80-b334-d8b959a6187d"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:33:26 crc kubenswrapper[5070]: I1213 03:33:26.889253 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/9dd13bd9-bfbd-4f80-b334-d8b959a6187d-pod-info" (OuterVolumeSpecName: "pod-info") pod "9dd13bd9-bfbd-4f80-b334-d8b959a6187d" (UID: "9dd13bd9-bfbd-4f80-b334-d8b959a6187d"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Dec 13 03:33:26 crc kubenswrapper[5070]: I1213 03:33:26.891258 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage08-crc" (OuterVolumeSpecName: "persistence") pod "9dd13bd9-bfbd-4f80-b334-d8b959a6187d" (UID: "9dd13bd9-bfbd-4f80-b334-d8b959a6187d"). InnerVolumeSpecName "local-storage08-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 13 03:33:26 crc kubenswrapper[5070]: I1213 03:33:26.893570 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9dd13bd9-bfbd-4f80-b334-d8b959a6187d-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "9dd13bd9-bfbd-4f80-b334-d8b959a6187d" (UID: "9dd13bd9-bfbd-4f80-b334-d8b959a6187d"). InnerVolumeSpecName "erlang-cookie-secret". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:33:26 crc kubenswrapper[5070]: I1213 03:33:26.904927 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9dd13bd9-bfbd-4f80-b334-d8b959a6187d-kube-api-access-wlsq6" (OuterVolumeSpecName: "kube-api-access-wlsq6") pod "9dd13bd9-bfbd-4f80-b334-d8b959a6187d" (UID: "9dd13bd9-bfbd-4f80-b334-d8b959a6187d"). InnerVolumeSpecName "kube-api-access-wlsq6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:33:26 crc kubenswrapper[5070]: I1213 03:33:26.924506 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9dd13bd9-bfbd-4f80-b334-d8b959a6187d-config-data" (OuterVolumeSpecName: "config-data") pod "9dd13bd9-bfbd-4f80-b334-d8b959a6187d" (UID: "9dd13bd9-bfbd-4f80-b334-d8b959a6187d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:33:26 crc kubenswrapper[5070]: I1213 03:33:26.947119 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9dd13bd9-bfbd-4f80-b334-d8b959a6187d-server-conf" (OuterVolumeSpecName: "server-conf") pod "9dd13bd9-bfbd-4f80-b334-d8b959a6187d" (UID: "9dd13bd9-bfbd-4f80-b334-d8b959a6187d"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:33:26 crc kubenswrapper[5070]: I1213 03:33:26.983997 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wlsq6\" (UniqueName: \"kubernetes.io/projected/9dd13bd9-bfbd-4f80-b334-d8b959a6187d-kube-api-access-wlsq6\") on node \"crc\" DevicePath \"\"" Dec 13 03:33:26 crc kubenswrapper[5070]: I1213 03:33:26.984046 5070 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/9dd13bd9-bfbd-4f80-b334-d8b959a6187d-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Dec 13 03:33:26 crc kubenswrapper[5070]: I1213 03:33:26.984077 5070 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" " Dec 13 03:33:26 crc kubenswrapper[5070]: I1213 03:33:26.984089 5070 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/9dd13bd9-bfbd-4f80-b334-d8b959a6187d-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Dec 13 03:33:26 crc kubenswrapper[5070]: I1213 03:33:26.984102 5070 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/9dd13bd9-bfbd-4f80-b334-d8b959a6187d-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Dec 13 03:33:26 crc kubenswrapper[5070]: I1213 03:33:26.984115 5070 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/9dd13bd9-bfbd-4f80-b334-d8b959a6187d-config-data\") on node \"crc\" DevicePath \"\"" Dec 13 03:33:26 crc kubenswrapper[5070]: I1213 03:33:26.984130 5070 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/9dd13bd9-bfbd-4f80-b334-d8b959a6187d-server-conf\") on node \"crc\" DevicePath \"\"" Dec 13 03:33:26 crc kubenswrapper[5070]: I1213 03:33:26.984140 5070 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/9dd13bd9-bfbd-4f80-b334-d8b959a6187d-plugins-conf\") on node \"crc\" DevicePath \"\"" Dec 13 03:33:26 
crc kubenswrapper[5070]: I1213 03:33:26.984150 5070 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/9dd13bd9-bfbd-4f80-b334-d8b959a6187d-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Dec 13 03:33:26 crc kubenswrapper[5070]: I1213 03:33:26.984164 5070 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/9dd13bd9-bfbd-4f80-b334-d8b959a6187d-pod-info\") on node \"crc\" DevicePath \"\"" Dec 13 03:33:27 crc kubenswrapper[5070]: I1213 03:33:27.011132 5070 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage08-crc" (UniqueName: "kubernetes.io/local-volume/local-storage08-crc") on node "crc" Dec 13 03:33:27 crc kubenswrapper[5070]: I1213 03:33:27.025626 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9dd13bd9-bfbd-4f80-b334-d8b959a6187d-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "9dd13bd9-bfbd-4f80-b334-d8b959a6187d" (UID: "9dd13bd9-bfbd-4f80-b334-d8b959a6187d"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:33:27 crc kubenswrapper[5070]: I1213 03:33:27.085549 5070 reconciler_common.go:293] "Volume detached for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" DevicePath \"\"" Dec 13 03:33:27 crc kubenswrapper[5070]: I1213 03:33:27.085599 5070 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/9dd13bd9-bfbd-4f80-b334-d8b959a6187d-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Dec 13 03:33:27 crc kubenswrapper[5070]: I1213 03:33:27.151187 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 13 03:33:27 crc kubenswrapper[5070]: I1213 03:33:27.289541 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mgtbx\" (UniqueName: \"kubernetes.io/projected/d669856e-7406-451a-825e-9de1fc76f8b2-kube-api-access-mgtbx\") pod \"d669856e-7406-451a-825e-9de1fc76f8b2\" (UID: \"d669856e-7406-451a-825e-9de1fc76f8b2\") " Dec 13 03:33:27 crc kubenswrapper[5070]: I1213 03:33:27.289591 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/d669856e-7406-451a-825e-9de1fc76f8b2-erlang-cookie-secret\") pod \"d669856e-7406-451a-825e-9de1fc76f8b2\" (UID: \"d669856e-7406-451a-825e-9de1fc76f8b2\") " Dec 13 03:33:27 crc kubenswrapper[5070]: I1213 03:33:27.289673 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/d669856e-7406-451a-825e-9de1fc76f8b2-pod-info\") pod \"d669856e-7406-451a-825e-9de1fc76f8b2\" (UID: \"d669856e-7406-451a-825e-9de1fc76f8b2\") " Dec 13 03:33:27 crc kubenswrapper[5070]: I1213 03:33:27.289720 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/d669856e-7406-451a-825e-9de1fc76f8b2-rabbitmq-erlang-cookie\") pod \"d669856e-7406-451a-825e-9de1fc76f8b2\" (UID: \"d669856e-7406-451a-825e-9de1fc76f8b2\") " Dec 13 03:33:27 crc kubenswrapper[5070]: I1213 03:33:27.289774 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/d669856e-7406-451a-825e-9de1fc76f8b2-server-conf\") pod \"d669856e-7406-451a-825e-9de1fc76f8b2\" (UID: \"d669856e-7406-451a-825e-9de1fc76f8b2\") " Dec 13 03:33:27 crc kubenswrapper[5070]: I1213 03:33:27.289825 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"d669856e-7406-451a-825e-9de1fc76f8b2\" (UID: \"d669856e-7406-451a-825e-9de1fc76f8b2\") " Dec 13 03:33:27 crc kubenswrapper[5070]: I1213 03:33:27.289936 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/d669856e-7406-451a-825e-9de1fc76f8b2-rabbitmq-confd\") pod \"d669856e-7406-451a-825e-9de1fc76f8b2\" (UID: \"d669856e-7406-451a-825e-9de1fc76f8b2\") " Dec 13 03:33:27 crc kubenswrapper[5070]: I1213 03:33:27.289965 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/d669856e-7406-451a-825e-9de1fc76f8b2-rabbitmq-plugins\") pod \"d669856e-7406-451a-825e-9de1fc76f8b2\" (UID: \"d669856e-7406-451a-825e-9de1fc76f8b2\") " Dec 13 03:33:27 crc kubenswrapper[5070]: I1213 03:33:27.290005 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d669856e-7406-451a-825e-9de1fc76f8b2-config-data\") pod \"d669856e-7406-451a-825e-9de1fc76f8b2\" (UID: \"d669856e-7406-451a-825e-9de1fc76f8b2\") " Dec 13 03:33:27 crc kubenswrapper[5070]: I1213 03:33:27.290072 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/d669856e-7406-451a-825e-9de1fc76f8b2-plugins-conf\") pod \"d669856e-7406-451a-825e-9de1fc76f8b2\" (UID: 
\"d669856e-7406-451a-825e-9de1fc76f8b2\") " Dec 13 03:33:27 crc kubenswrapper[5070]: I1213 03:33:27.290105 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/d669856e-7406-451a-825e-9de1fc76f8b2-rabbitmq-tls\") pod \"d669856e-7406-451a-825e-9de1fc76f8b2\" (UID: \"d669856e-7406-451a-825e-9de1fc76f8b2\") " Dec 13 03:33:27 crc kubenswrapper[5070]: I1213 03:33:27.294091 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d669856e-7406-451a-825e-9de1fc76f8b2-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "d669856e-7406-451a-825e-9de1fc76f8b2" (UID: "d669856e-7406-451a-825e-9de1fc76f8b2"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 03:33:27 crc kubenswrapper[5070]: I1213 03:33:27.294386 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d669856e-7406-451a-825e-9de1fc76f8b2-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "d669856e-7406-451a-825e-9de1fc76f8b2" (UID: "d669856e-7406-451a-825e-9de1fc76f8b2"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 03:33:27 crc kubenswrapper[5070]: I1213 03:33:27.295020 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d669856e-7406-451a-825e-9de1fc76f8b2-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "d669856e-7406-451a-825e-9de1fc76f8b2" (UID: "d669856e-7406-451a-825e-9de1fc76f8b2"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:33:27 crc kubenswrapper[5070]: I1213 03:33:27.311735 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d669856e-7406-451a-825e-9de1fc76f8b2-kube-api-access-mgtbx" (OuterVolumeSpecName: "kube-api-access-mgtbx") pod "d669856e-7406-451a-825e-9de1fc76f8b2" (UID: "d669856e-7406-451a-825e-9de1fc76f8b2"). InnerVolumeSpecName "kube-api-access-mgtbx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:33:27 crc kubenswrapper[5070]: I1213 03:33:27.315185 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d669856e-7406-451a-825e-9de1fc76f8b2-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "d669856e-7406-451a-825e-9de1fc76f8b2" (UID: "d669856e-7406-451a-825e-9de1fc76f8b2"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:33:27 crc kubenswrapper[5070]: I1213 03:33:27.319610 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage04-crc" (OuterVolumeSpecName: "persistence") pod "d669856e-7406-451a-825e-9de1fc76f8b2" (UID: "d669856e-7406-451a-825e-9de1fc76f8b2"). InnerVolumeSpecName "local-storage04-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 13 03:33:27 crc kubenswrapper[5070]: I1213 03:33:27.321720 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d669856e-7406-451a-825e-9de1fc76f8b2-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "d669856e-7406-451a-825e-9de1fc76f8b2" (UID: "d669856e-7406-451a-825e-9de1fc76f8b2"). InnerVolumeSpecName "rabbitmq-tls". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:33:27 crc kubenswrapper[5070]: I1213 03:33:27.322627 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/d669856e-7406-451a-825e-9de1fc76f8b2-pod-info" (OuterVolumeSpecName: "pod-info") pod "d669856e-7406-451a-825e-9de1fc76f8b2" (UID: "d669856e-7406-451a-825e-9de1fc76f8b2"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Dec 13 03:33:27 crc kubenswrapper[5070]: I1213 03:33:27.350864 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d669856e-7406-451a-825e-9de1fc76f8b2-config-data" (OuterVolumeSpecName: "config-data") pod "d669856e-7406-451a-825e-9de1fc76f8b2" (UID: "d669856e-7406-451a-825e-9de1fc76f8b2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:33:27 crc kubenswrapper[5070]: I1213 03:33:27.370520 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d669856e-7406-451a-825e-9de1fc76f8b2-server-conf" (OuterVolumeSpecName: "server-conf") pod "d669856e-7406-451a-825e-9de1fc76f8b2" (UID: "d669856e-7406-451a-825e-9de1fc76f8b2"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:33:27 crc kubenswrapper[5070]: I1213 03:33:27.395587 5070 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/d669856e-7406-451a-825e-9de1fc76f8b2-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Dec 13 03:33:27 crc kubenswrapper[5070]: I1213 03:33:27.395630 5070 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d669856e-7406-451a-825e-9de1fc76f8b2-config-data\") on node \"crc\" DevicePath \"\"" Dec 13 03:33:27 crc kubenswrapper[5070]: I1213 03:33:27.395642 5070 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/d669856e-7406-451a-825e-9de1fc76f8b2-plugins-conf\") on node \"crc\" DevicePath \"\"" Dec 13 03:33:27 crc kubenswrapper[5070]: I1213 03:33:27.395654 5070 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/d669856e-7406-451a-825e-9de1fc76f8b2-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Dec 13 03:33:27 crc kubenswrapper[5070]: I1213 03:33:27.395679 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mgtbx\" (UniqueName: \"kubernetes.io/projected/d669856e-7406-451a-825e-9de1fc76f8b2-kube-api-access-mgtbx\") on node \"crc\" DevicePath \"\"" Dec 13 03:33:27 crc kubenswrapper[5070]: I1213 03:33:27.395689 5070 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/d669856e-7406-451a-825e-9de1fc76f8b2-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Dec 13 03:33:27 crc kubenswrapper[5070]: I1213 03:33:27.395697 5070 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/d669856e-7406-451a-825e-9de1fc76f8b2-pod-info\") on node \"crc\" DevicePath \"\"" Dec 13 03:33:27 crc kubenswrapper[5070]: I1213 03:33:27.395706 5070 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/d669856e-7406-451a-825e-9de1fc76f8b2-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Dec 13 03:33:27 crc 
kubenswrapper[5070]: I1213 03:33:27.395714 5070 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/d669856e-7406-451a-825e-9de1fc76f8b2-server-conf\") on node \"crc\" DevicePath \"\"" Dec 13 03:33:27 crc kubenswrapper[5070]: I1213 03:33:27.395735 5070 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" " Dec 13 03:33:27 crc kubenswrapper[5070]: I1213 03:33:27.483941 5070 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage04-crc" (UniqueName: "kubernetes.io/local-volume/local-storage04-crc") on node "crc" Dec 13 03:33:27 crc kubenswrapper[5070]: I1213 03:33:27.500494 5070 reconciler_common.go:293] "Volume detached for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" DevicePath \"\"" Dec 13 03:33:27 crc kubenswrapper[5070]: I1213 03:33:27.536318 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d669856e-7406-451a-825e-9de1fc76f8b2-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "d669856e-7406-451a-825e-9de1fc76f8b2" (UID: "d669856e-7406-451a-825e-9de1fc76f8b2"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:33:27 crc kubenswrapper[5070]: I1213 03:33:27.602353 5070 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/d669856e-7406-451a-825e-9de1fc76f8b2-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Dec 13 03:33:27 crc kubenswrapper[5070]: I1213 03:33:27.673214 5070 generic.go:334] "Generic (PLEG): container finished" podID="d669856e-7406-451a-825e-9de1fc76f8b2" containerID="ea3f2b7f506ec15d77b16759183c4887b690d74931302844a22ffc6494accc26" exitCode=0 Dec 13 03:33:27 crc kubenswrapper[5070]: I1213 03:33:27.673264 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"d669856e-7406-451a-825e-9de1fc76f8b2","Type":"ContainerDied","Data":"ea3f2b7f506ec15d77b16759183c4887b690d74931302844a22ffc6494accc26"} Dec 13 03:33:27 crc kubenswrapper[5070]: I1213 03:33:27.673305 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 13 03:33:27 crc kubenswrapper[5070]: I1213 03:33:27.673320 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"d669856e-7406-451a-825e-9de1fc76f8b2","Type":"ContainerDied","Data":"f7173909be8baba2717545f4dee5f99e2a66454c88fefd7520a3aa462df2a51d"} Dec 13 03:33:27 crc kubenswrapper[5070]: I1213 03:33:27.673345 5070 scope.go:117] "RemoveContainer" containerID="ea3f2b7f506ec15d77b16759183c4887b690d74931302844a22ffc6494accc26" Dec 13 03:33:27 crc kubenswrapper[5070]: I1213 03:33:27.673338 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 13 03:33:27 crc kubenswrapper[5070]: I1213 03:33:27.706483 5070 scope.go:117] "RemoveContainer" containerID="a06d4aee3203cbcbcb26562e632af6ca69d14932f595072336c8a998466e2782" Dec 13 03:33:27 crc kubenswrapper[5070]: I1213 03:33:27.717095 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 13 03:33:27 crc kubenswrapper[5070]: I1213 03:33:27.728347 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 13 03:33:27 crc kubenswrapper[5070]: I1213 03:33:27.734256 5070 scope.go:117] "RemoveContainer" containerID="ea3f2b7f506ec15d77b16759183c4887b690d74931302844a22ffc6494accc26" Dec 13 03:33:27 crc kubenswrapper[5070]: E1213 03:33:27.735060 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ea3f2b7f506ec15d77b16759183c4887b690d74931302844a22ffc6494accc26\": container with ID starting with ea3f2b7f506ec15d77b16759183c4887b690d74931302844a22ffc6494accc26 not found: ID does not exist" containerID="ea3f2b7f506ec15d77b16759183c4887b690d74931302844a22ffc6494accc26" Dec 13 03:33:27 crc kubenswrapper[5070]: I1213 03:33:27.735161 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ea3f2b7f506ec15d77b16759183c4887b690d74931302844a22ffc6494accc26"} err="failed to get container status \"ea3f2b7f506ec15d77b16759183c4887b690d74931302844a22ffc6494accc26\": rpc error: code = NotFound desc = could not find container \"ea3f2b7f506ec15d77b16759183c4887b690d74931302844a22ffc6494accc26\": container with ID starting with ea3f2b7f506ec15d77b16759183c4887b690d74931302844a22ffc6494accc26 not found: ID does not exist" Dec 13 03:33:27 crc kubenswrapper[5070]: I1213 03:33:27.735213 5070 scope.go:117] "RemoveContainer" containerID="a06d4aee3203cbcbcb26562e632af6ca69d14932f595072336c8a998466e2782" Dec 13 03:33:27 crc kubenswrapper[5070]: E1213 03:33:27.735802 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a06d4aee3203cbcbcb26562e632af6ca69d14932f595072336c8a998466e2782\": container with ID starting with a06d4aee3203cbcbcb26562e632af6ca69d14932f595072336c8a998466e2782 not found: ID does not exist" containerID="a06d4aee3203cbcbcb26562e632af6ca69d14932f595072336c8a998466e2782" Dec 13 03:33:27 crc kubenswrapper[5070]: I1213 03:33:27.735863 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a06d4aee3203cbcbcb26562e632af6ca69d14932f595072336c8a998466e2782"} err="failed to get container status \"a06d4aee3203cbcbcb26562e632af6ca69d14932f595072336c8a998466e2782\": rpc error: code = NotFound desc = could not find container \"a06d4aee3203cbcbcb26562e632af6ca69d14932f595072336c8a998466e2782\": container with ID starting with a06d4aee3203cbcbcb26562e632af6ca69d14932f595072336c8a998466e2782 not found: ID does not exist" Dec 13 03:33:27 crc kubenswrapper[5070]: I1213 03:33:27.741103 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 13 03:33:27 crc kubenswrapper[5070]: I1213 03:33:27.796578 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 13 03:33:27 crc kubenswrapper[5070]: I1213 03:33:27.825097 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 13 03:33:27 crc kubenswrapper[5070]: E1213 
03:33:27.825515 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="547c2dbf-3224-4198-b099-e445f402a373" containerName="extract-utilities" Dec 13 03:33:27 crc kubenswrapper[5070]: I1213 03:33:27.825530 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="547c2dbf-3224-4198-b099-e445f402a373" containerName="extract-utilities" Dec 13 03:33:27 crc kubenswrapper[5070]: E1213 03:33:27.825537 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9dd13bd9-bfbd-4f80-b334-d8b959a6187d" containerName="setup-container" Dec 13 03:33:27 crc kubenswrapper[5070]: I1213 03:33:27.825543 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="9dd13bd9-bfbd-4f80-b334-d8b959a6187d" containerName="setup-container" Dec 13 03:33:27 crc kubenswrapper[5070]: E1213 03:33:27.825557 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="547c2dbf-3224-4198-b099-e445f402a373" containerName="registry-server" Dec 13 03:33:27 crc kubenswrapper[5070]: I1213 03:33:27.825563 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="547c2dbf-3224-4198-b099-e445f402a373" containerName="registry-server" Dec 13 03:33:27 crc kubenswrapper[5070]: E1213 03:33:27.825571 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9dd13bd9-bfbd-4f80-b334-d8b959a6187d" containerName="rabbitmq" Dec 13 03:33:27 crc kubenswrapper[5070]: I1213 03:33:27.825577 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="9dd13bd9-bfbd-4f80-b334-d8b959a6187d" containerName="rabbitmq" Dec 13 03:33:27 crc kubenswrapper[5070]: E1213 03:33:27.825592 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d669856e-7406-451a-825e-9de1fc76f8b2" containerName="rabbitmq" Dec 13 03:33:27 crc kubenswrapper[5070]: I1213 03:33:27.825597 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="d669856e-7406-451a-825e-9de1fc76f8b2" containerName="rabbitmq" Dec 13 03:33:27 crc kubenswrapper[5070]: E1213 03:33:27.825614 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="547c2dbf-3224-4198-b099-e445f402a373" containerName="extract-content" Dec 13 03:33:27 crc kubenswrapper[5070]: I1213 03:33:27.825620 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="547c2dbf-3224-4198-b099-e445f402a373" containerName="extract-content" Dec 13 03:33:27 crc kubenswrapper[5070]: E1213 03:33:27.825633 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d669856e-7406-451a-825e-9de1fc76f8b2" containerName="setup-container" Dec 13 03:33:27 crc kubenswrapper[5070]: I1213 03:33:27.825638 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="d669856e-7406-451a-825e-9de1fc76f8b2" containerName="setup-container" Dec 13 03:33:27 crc kubenswrapper[5070]: I1213 03:33:27.825808 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="d669856e-7406-451a-825e-9de1fc76f8b2" containerName="rabbitmq" Dec 13 03:33:27 crc kubenswrapper[5070]: I1213 03:33:27.825820 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="9dd13bd9-bfbd-4f80-b334-d8b959a6187d" containerName="rabbitmq" Dec 13 03:33:27 crc kubenswrapper[5070]: I1213 03:33:27.825838 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="547c2dbf-3224-4198-b099-e445f402a373" containerName="registry-server" Dec 13 03:33:27 crc kubenswrapper[5070]: I1213 03:33:27.826849 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 13 03:33:27 crc kubenswrapper[5070]: I1213 03:33:27.830350 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Dec 13 03:33:27 crc kubenswrapper[5070]: I1213 03:33:27.830526 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Dec 13 03:33:27 crc kubenswrapper[5070]: I1213 03:33:27.830641 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Dec 13 03:33:27 crc kubenswrapper[5070]: I1213 03:33:27.830691 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Dec 13 03:33:27 crc kubenswrapper[5070]: I1213 03:33:27.830816 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Dec 13 03:33:27 crc kubenswrapper[5070]: I1213 03:33:27.831126 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-qjxjw" Dec 13 03:33:27 crc kubenswrapper[5070]: I1213 03:33:27.831194 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Dec 13 03:33:27 crc kubenswrapper[5070]: I1213 03:33:27.864920 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Dec 13 03:33:27 crc kubenswrapper[5070]: I1213 03:33:27.866464 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 13 03:33:27 crc kubenswrapper[5070]: I1213 03:33:27.869798 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Dec 13 03:33:27 crc kubenswrapper[5070]: I1213 03:33:27.870807 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-hcv2k" Dec 13 03:33:27 crc kubenswrapper[5070]: I1213 03:33:27.871013 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Dec 13 03:33:27 crc kubenswrapper[5070]: I1213 03:33:27.871141 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Dec 13 03:33:27 crc kubenswrapper[5070]: I1213 03:33:27.871264 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Dec 13 03:33:27 crc kubenswrapper[5070]: I1213 03:33:27.871486 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Dec 13 03:33:27 crc kubenswrapper[5070]: I1213 03:33:27.872109 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Dec 13 03:33:27 crc kubenswrapper[5070]: I1213 03:33:27.879038 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 13 03:33:27 crc kubenswrapper[5070]: I1213 03:33:27.892062 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 13 03:33:28 crc kubenswrapper[5070]: I1213 03:33:28.013679 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/965e1f2f-3f50-4411-8006-4db60cb5a504-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"965e1f2f-3f50-4411-8006-4db60cb5a504\") " pod="openstack/rabbitmq-server-0" Dec 13 03:33:28 crc kubenswrapper[5070]: I1213 03:33:28.013736 5070 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/46b708f1-eabc-44f5-8388-8e6b42d66fd0-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"46b708f1-eabc-44f5-8388-8e6b42d66fd0\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 03:33:28 crc kubenswrapper[5070]: I1213 03:33:28.013775 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/965e1f2f-3f50-4411-8006-4db60cb5a504-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"965e1f2f-3f50-4411-8006-4db60cb5a504\") " pod="openstack/rabbitmq-server-0" Dec 13 03:33:28 crc kubenswrapper[5070]: I1213 03:33:28.013791 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/965e1f2f-3f50-4411-8006-4db60cb5a504-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"965e1f2f-3f50-4411-8006-4db60cb5a504\") " pod="openstack/rabbitmq-server-0" Dec 13 03:33:28 crc kubenswrapper[5070]: I1213 03:33:28.013813 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/965e1f2f-3f50-4411-8006-4db60cb5a504-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"965e1f2f-3f50-4411-8006-4db60cb5a504\") " pod="openstack/rabbitmq-server-0" Dec 13 03:33:28 crc kubenswrapper[5070]: I1213 03:33:28.013829 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/965e1f2f-3f50-4411-8006-4db60cb5a504-server-conf\") pod \"rabbitmq-server-0\" (UID: \"965e1f2f-3f50-4411-8006-4db60cb5a504\") " pod="openstack/rabbitmq-server-0" Dec 13 03:33:28 crc kubenswrapper[5070]: I1213 03:33:28.013844 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/46b708f1-eabc-44f5-8388-8e6b42d66fd0-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"46b708f1-eabc-44f5-8388-8e6b42d66fd0\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 03:33:28 crc kubenswrapper[5070]: I1213 03:33:28.013860 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/46b708f1-eabc-44f5-8388-8e6b42d66fd0-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"46b708f1-eabc-44f5-8388-8e6b42d66fd0\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 03:33:28 crc kubenswrapper[5070]: I1213 03:33:28.013876 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4tmth\" (UniqueName: \"kubernetes.io/projected/965e1f2f-3f50-4411-8006-4db60cb5a504-kube-api-access-4tmth\") pod \"rabbitmq-server-0\" (UID: \"965e1f2f-3f50-4411-8006-4db60cb5a504\") " pod="openstack/rabbitmq-server-0" Dec 13 03:33:28 crc kubenswrapper[5070]: I1213 03:33:28.013895 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/46b708f1-eabc-44f5-8388-8e6b42d66fd0-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"46b708f1-eabc-44f5-8388-8e6b42d66fd0\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 03:33:28 crc kubenswrapper[5070]: I1213 
03:33:28.013909 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/46b708f1-eabc-44f5-8388-8e6b42d66fd0-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"46b708f1-eabc-44f5-8388-8e6b42d66fd0\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 03:33:28 crc kubenswrapper[5070]: I1213 03:33:28.013929 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h9sw9\" (UniqueName: \"kubernetes.io/projected/46b708f1-eabc-44f5-8388-8e6b42d66fd0-kube-api-access-h9sw9\") pod \"rabbitmq-cell1-server-0\" (UID: \"46b708f1-eabc-44f5-8388-8e6b42d66fd0\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 03:33:28 crc kubenswrapper[5070]: I1213 03:33:28.013947 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"rabbitmq-server-0\" (UID: \"965e1f2f-3f50-4411-8006-4db60cb5a504\") " pod="openstack/rabbitmq-server-0" Dec 13 03:33:28 crc kubenswrapper[5070]: I1213 03:33:28.013963 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/46b708f1-eabc-44f5-8388-8e6b42d66fd0-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"46b708f1-eabc-44f5-8388-8e6b42d66fd0\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 03:33:28 crc kubenswrapper[5070]: I1213 03:33:28.014022 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/46b708f1-eabc-44f5-8388-8e6b42d66fd0-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"46b708f1-eabc-44f5-8388-8e6b42d66fd0\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 03:33:28 crc kubenswrapper[5070]: I1213 03:33:28.014040 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/965e1f2f-3f50-4411-8006-4db60cb5a504-pod-info\") pod \"rabbitmq-server-0\" (UID: \"965e1f2f-3f50-4411-8006-4db60cb5a504\") " pod="openstack/rabbitmq-server-0" Dec 13 03:33:28 crc kubenswrapper[5070]: I1213 03:33:28.014059 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/46b708f1-eabc-44f5-8388-8e6b42d66fd0-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"46b708f1-eabc-44f5-8388-8e6b42d66fd0\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 03:33:28 crc kubenswrapper[5070]: I1213 03:33:28.014081 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/965e1f2f-3f50-4411-8006-4db60cb5a504-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"965e1f2f-3f50-4411-8006-4db60cb5a504\") " pod="openstack/rabbitmq-server-0" Dec 13 03:33:28 crc kubenswrapper[5070]: I1213 03:33:28.014114 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/965e1f2f-3f50-4411-8006-4db60cb5a504-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"965e1f2f-3f50-4411-8006-4db60cb5a504\") " pod="openstack/rabbitmq-server-0" Dec 13 03:33:28 crc kubenswrapper[5070]: I1213 03:33:28.014138 5070 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/46b708f1-eabc-44f5-8388-8e6b42d66fd0-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"46b708f1-eabc-44f5-8388-8e6b42d66fd0\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 03:33:28 crc kubenswrapper[5070]: I1213 03:33:28.014155 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/965e1f2f-3f50-4411-8006-4db60cb5a504-config-data\") pod \"rabbitmq-server-0\" (UID: \"965e1f2f-3f50-4411-8006-4db60cb5a504\") " pod="openstack/rabbitmq-server-0" Dec 13 03:33:28 crc kubenswrapper[5070]: I1213 03:33:28.014172 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"46b708f1-eabc-44f5-8388-8e6b42d66fd0\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 03:33:28 crc kubenswrapper[5070]: I1213 03:33:28.115679 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/965e1f2f-3f50-4411-8006-4db60cb5a504-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"965e1f2f-3f50-4411-8006-4db60cb5a504\") " pod="openstack/rabbitmq-server-0" Dec 13 03:33:28 crc kubenswrapper[5070]: I1213 03:33:28.115783 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/965e1f2f-3f50-4411-8006-4db60cb5a504-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"965e1f2f-3f50-4411-8006-4db60cb5a504\") " pod="openstack/rabbitmq-server-0" Dec 13 03:33:28 crc kubenswrapper[5070]: I1213 03:33:28.115835 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/46b708f1-eabc-44f5-8388-8e6b42d66fd0-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"46b708f1-eabc-44f5-8388-8e6b42d66fd0\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 03:33:28 crc kubenswrapper[5070]: I1213 03:33:28.115868 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"46b708f1-eabc-44f5-8388-8e6b42d66fd0\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 03:33:28 crc kubenswrapper[5070]: I1213 03:33:28.115892 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/965e1f2f-3f50-4411-8006-4db60cb5a504-config-data\") pod \"rabbitmq-server-0\" (UID: \"965e1f2f-3f50-4411-8006-4db60cb5a504\") " pod="openstack/rabbitmq-server-0" Dec 13 03:33:28 crc kubenswrapper[5070]: I1213 03:33:28.115944 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/965e1f2f-3f50-4411-8006-4db60cb5a504-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"965e1f2f-3f50-4411-8006-4db60cb5a504\") " pod="openstack/rabbitmq-server-0" Dec 13 03:33:28 crc kubenswrapper[5070]: I1213 03:33:28.115985 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/46b708f1-eabc-44f5-8388-8e6b42d66fd0-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: 
\"46b708f1-eabc-44f5-8388-8e6b42d66fd0\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 03:33:28 crc kubenswrapper[5070]: I1213 03:33:28.116049 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/965e1f2f-3f50-4411-8006-4db60cb5a504-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"965e1f2f-3f50-4411-8006-4db60cb5a504\") " pod="openstack/rabbitmq-server-0" Dec 13 03:33:28 crc kubenswrapper[5070]: I1213 03:33:28.116074 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/965e1f2f-3f50-4411-8006-4db60cb5a504-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"965e1f2f-3f50-4411-8006-4db60cb5a504\") " pod="openstack/rabbitmq-server-0" Dec 13 03:33:28 crc kubenswrapper[5070]: I1213 03:33:28.116109 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/965e1f2f-3f50-4411-8006-4db60cb5a504-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"965e1f2f-3f50-4411-8006-4db60cb5a504\") " pod="openstack/rabbitmq-server-0" Dec 13 03:33:28 crc kubenswrapper[5070]: I1213 03:33:28.116136 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/965e1f2f-3f50-4411-8006-4db60cb5a504-server-conf\") pod \"rabbitmq-server-0\" (UID: \"965e1f2f-3f50-4411-8006-4db60cb5a504\") " pod="openstack/rabbitmq-server-0" Dec 13 03:33:28 crc kubenswrapper[5070]: I1213 03:33:28.116159 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/46b708f1-eabc-44f5-8388-8e6b42d66fd0-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"46b708f1-eabc-44f5-8388-8e6b42d66fd0\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 03:33:28 crc kubenswrapper[5070]: I1213 03:33:28.116180 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/46b708f1-eabc-44f5-8388-8e6b42d66fd0-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"46b708f1-eabc-44f5-8388-8e6b42d66fd0\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 03:33:28 crc kubenswrapper[5070]: I1213 03:33:28.116207 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4tmth\" (UniqueName: \"kubernetes.io/projected/965e1f2f-3f50-4411-8006-4db60cb5a504-kube-api-access-4tmth\") pod \"rabbitmq-server-0\" (UID: \"965e1f2f-3f50-4411-8006-4db60cb5a504\") " pod="openstack/rabbitmq-server-0" Dec 13 03:33:28 crc kubenswrapper[5070]: I1213 03:33:28.116237 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/46b708f1-eabc-44f5-8388-8e6b42d66fd0-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"46b708f1-eabc-44f5-8388-8e6b42d66fd0\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 03:33:28 crc kubenswrapper[5070]: I1213 03:33:28.116261 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/46b708f1-eabc-44f5-8388-8e6b42d66fd0-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"46b708f1-eabc-44f5-8388-8e6b42d66fd0\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 03:33:28 crc kubenswrapper[5070]: I1213 03:33:28.116281 5070 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h9sw9\" (UniqueName: \"kubernetes.io/projected/46b708f1-eabc-44f5-8388-8e6b42d66fd0-kube-api-access-h9sw9\") pod \"rabbitmq-cell1-server-0\" (UID: \"46b708f1-eabc-44f5-8388-8e6b42d66fd0\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 03:33:28 crc kubenswrapper[5070]: I1213 03:33:28.116310 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"rabbitmq-server-0\" (UID: \"965e1f2f-3f50-4411-8006-4db60cb5a504\") " pod="openstack/rabbitmq-server-0" Dec 13 03:33:28 crc kubenswrapper[5070]: I1213 03:33:28.116332 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/46b708f1-eabc-44f5-8388-8e6b42d66fd0-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"46b708f1-eabc-44f5-8388-8e6b42d66fd0\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 03:33:28 crc kubenswrapper[5070]: I1213 03:33:28.116383 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/46b708f1-eabc-44f5-8388-8e6b42d66fd0-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"46b708f1-eabc-44f5-8388-8e6b42d66fd0\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 03:33:28 crc kubenswrapper[5070]: I1213 03:33:28.116405 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/965e1f2f-3f50-4411-8006-4db60cb5a504-pod-info\") pod \"rabbitmq-server-0\" (UID: \"965e1f2f-3f50-4411-8006-4db60cb5a504\") " pod="openstack/rabbitmq-server-0" Dec 13 03:33:28 crc kubenswrapper[5070]: I1213 03:33:28.116431 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/46b708f1-eabc-44f5-8388-8e6b42d66fd0-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"46b708f1-eabc-44f5-8388-8e6b42d66fd0\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 03:33:28 crc kubenswrapper[5070]: I1213 03:33:28.116822 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/965e1f2f-3f50-4411-8006-4db60cb5a504-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"965e1f2f-3f50-4411-8006-4db60cb5a504\") " pod="openstack/rabbitmq-server-0" Dec 13 03:33:28 crc kubenswrapper[5070]: I1213 03:33:28.117111 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/46b708f1-eabc-44f5-8388-8e6b42d66fd0-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"46b708f1-eabc-44f5-8388-8e6b42d66fd0\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 03:33:28 crc kubenswrapper[5070]: I1213 03:33:28.117204 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/46b708f1-eabc-44f5-8388-8e6b42d66fd0-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"46b708f1-eabc-44f5-8388-8e6b42d66fd0\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 03:33:28 crc kubenswrapper[5070]: I1213 03:33:28.117214 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/965e1f2f-3f50-4411-8006-4db60cb5a504-config-data\") pod \"rabbitmq-server-0\" (UID: 
\"965e1f2f-3f50-4411-8006-4db60cb5a504\") " pod="openstack/rabbitmq-server-0" Dec 13 03:33:28 crc kubenswrapper[5070]: I1213 03:33:28.117547 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/965e1f2f-3f50-4411-8006-4db60cb5a504-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"965e1f2f-3f50-4411-8006-4db60cb5a504\") " pod="openstack/rabbitmq-server-0" Dec 13 03:33:28 crc kubenswrapper[5070]: I1213 03:33:28.117548 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/46b708f1-eabc-44f5-8388-8e6b42d66fd0-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"46b708f1-eabc-44f5-8388-8e6b42d66fd0\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 03:33:28 crc kubenswrapper[5070]: I1213 03:33:28.116208 5070 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"46b708f1-eabc-44f5-8388-8e6b42d66fd0\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/rabbitmq-cell1-server-0" Dec 13 03:33:28 crc kubenswrapper[5070]: I1213 03:33:28.118066 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/965e1f2f-3f50-4411-8006-4db60cb5a504-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"965e1f2f-3f50-4411-8006-4db60cb5a504\") " pod="openstack/rabbitmq-server-0" Dec 13 03:33:28 crc kubenswrapper[5070]: I1213 03:33:28.118390 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/46b708f1-eabc-44f5-8388-8e6b42d66fd0-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"46b708f1-eabc-44f5-8388-8e6b42d66fd0\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 03:33:28 crc kubenswrapper[5070]: I1213 03:33:28.118942 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/965e1f2f-3f50-4411-8006-4db60cb5a504-server-conf\") pod \"rabbitmq-server-0\" (UID: \"965e1f2f-3f50-4411-8006-4db60cb5a504\") " pod="openstack/rabbitmq-server-0" Dec 13 03:33:28 crc kubenswrapper[5070]: I1213 03:33:28.119734 5070 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"rabbitmq-server-0\" (UID: \"965e1f2f-3f50-4411-8006-4db60cb5a504\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/rabbitmq-server-0" Dec 13 03:33:28 crc kubenswrapper[5070]: I1213 03:33:28.121484 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/965e1f2f-3f50-4411-8006-4db60cb5a504-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"965e1f2f-3f50-4411-8006-4db60cb5a504\") " pod="openstack/rabbitmq-server-0" Dec 13 03:33:28 crc kubenswrapper[5070]: I1213 03:33:28.121505 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/46b708f1-eabc-44f5-8388-8e6b42d66fd0-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"46b708f1-eabc-44f5-8388-8e6b42d66fd0\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 03:33:28 crc kubenswrapper[5070]: I1213 03:33:28.122109 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" 
(UniqueName: \"kubernetes.io/projected/46b708f1-eabc-44f5-8388-8e6b42d66fd0-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"46b708f1-eabc-44f5-8388-8e6b42d66fd0\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 03:33:28 crc kubenswrapper[5070]: I1213 03:33:28.122738 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/965e1f2f-3f50-4411-8006-4db60cb5a504-pod-info\") pod \"rabbitmq-server-0\" (UID: \"965e1f2f-3f50-4411-8006-4db60cb5a504\") " pod="openstack/rabbitmq-server-0" Dec 13 03:33:28 crc kubenswrapper[5070]: I1213 03:33:28.123296 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/965e1f2f-3f50-4411-8006-4db60cb5a504-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"965e1f2f-3f50-4411-8006-4db60cb5a504\") " pod="openstack/rabbitmq-server-0" Dec 13 03:33:28 crc kubenswrapper[5070]: I1213 03:33:28.123692 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/965e1f2f-3f50-4411-8006-4db60cb5a504-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"965e1f2f-3f50-4411-8006-4db60cb5a504\") " pod="openstack/rabbitmq-server-0" Dec 13 03:33:28 crc kubenswrapper[5070]: I1213 03:33:28.125902 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/46b708f1-eabc-44f5-8388-8e6b42d66fd0-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"46b708f1-eabc-44f5-8388-8e6b42d66fd0\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 03:33:28 crc kubenswrapper[5070]: I1213 03:33:28.128143 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/46b708f1-eabc-44f5-8388-8e6b42d66fd0-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"46b708f1-eabc-44f5-8388-8e6b42d66fd0\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 03:33:28 crc kubenswrapper[5070]: I1213 03:33:28.129207 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/46b708f1-eabc-44f5-8388-8e6b42d66fd0-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"46b708f1-eabc-44f5-8388-8e6b42d66fd0\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 03:33:28 crc kubenswrapper[5070]: I1213 03:33:28.139844 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h9sw9\" (UniqueName: \"kubernetes.io/projected/46b708f1-eabc-44f5-8388-8e6b42d66fd0-kube-api-access-h9sw9\") pod \"rabbitmq-cell1-server-0\" (UID: \"46b708f1-eabc-44f5-8388-8e6b42d66fd0\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 03:33:28 crc kubenswrapper[5070]: I1213 03:33:28.140652 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4tmth\" (UniqueName: \"kubernetes.io/projected/965e1f2f-3f50-4411-8006-4db60cb5a504-kube-api-access-4tmth\") pod \"rabbitmq-server-0\" (UID: \"965e1f2f-3f50-4411-8006-4db60cb5a504\") " pod="openstack/rabbitmq-server-0" Dec 13 03:33:28 crc kubenswrapper[5070]: I1213 03:33:28.153323 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"46b708f1-eabc-44f5-8388-8e6b42d66fd0\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 03:33:28 crc kubenswrapper[5070]: I1213 
03:33:28.161358 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 13 03:33:28 crc kubenswrapper[5070]: I1213 03:33:28.178136 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9dd13bd9-bfbd-4f80-b334-d8b959a6187d" path="/var/lib/kubelet/pods/9dd13bd9-bfbd-4f80-b334-d8b959a6187d/volumes" Dec 13 03:33:28 crc kubenswrapper[5070]: I1213 03:33:28.179888 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d669856e-7406-451a-825e-9de1fc76f8b2" path="/var/lib/kubelet/pods/d669856e-7406-451a-825e-9de1fc76f8b2/volumes" Dec 13 03:33:28 crc kubenswrapper[5070]: I1213 03:33:28.192680 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"rabbitmq-server-0\" (UID: \"965e1f2f-3f50-4411-8006-4db60cb5a504\") " pod="openstack/rabbitmq-server-0" Dec 13 03:33:28 crc kubenswrapper[5070]: I1213 03:33:28.196925 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 13 03:33:28 crc kubenswrapper[5070]: I1213 03:33:28.660862 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 13 03:33:28 crc kubenswrapper[5070]: I1213 03:33:28.688378 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"46b708f1-eabc-44f5-8388-8e6b42d66fd0","Type":"ContainerStarted","Data":"dd2e777a580d0f5511f9d4a103c86045ac6675433be7b977d56eb85a2675fd70"} Dec 13 03:33:28 crc kubenswrapper[5070]: I1213 03:33:28.728170 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 13 03:33:28 crc kubenswrapper[5070]: W1213 03:33:28.740772 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod965e1f2f_3f50_4411_8006_4db60cb5a504.slice/crio-3a79e01aaffea4f3346e376fcac094246ac3840619817d12bcd3b48a1c9547a9 WatchSource:0}: Error finding container 3a79e01aaffea4f3346e376fcac094246ac3840619817d12bcd3b48a1c9547a9: Status 404 returned error can't find the container with id 3a79e01aaffea4f3346e376fcac094246ac3840619817d12bcd3b48a1c9547a9 Dec 13 03:33:29 crc kubenswrapper[5070]: I1213 03:33:29.706585 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"965e1f2f-3f50-4411-8006-4db60cb5a504","Type":"ContainerStarted","Data":"3a79e01aaffea4f3346e376fcac094246ac3840619817d12bcd3b48a1c9547a9"} Dec 13 03:33:30 crc kubenswrapper[5070]: I1213 03:33:30.718566 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"46b708f1-eabc-44f5-8388-8e6b42d66fd0","Type":"ContainerStarted","Data":"2ce4bfdc86dd74f8f61529fba986df028e7efbfdee01497c6203c9377b7db38b"} Dec 13 03:33:30 crc kubenswrapper[5070]: I1213 03:33:30.719878 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"965e1f2f-3f50-4411-8006-4db60cb5a504","Type":"ContainerStarted","Data":"282fd72811e59357ff5a9edab91054f1508c67580e6a34ff904bbed8b37a7499"} Dec 13 03:33:33 crc kubenswrapper[5070]: I1213 03:33:33.782789 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-578b8d767c-qpqfd"] Dec 13 03:33:33 crc kubenswrapper[5070]: I1213 03:33:33.785141 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-578b8d767c-qpqfd" Dec 13 03:33:33 crc kubenswrapper[5070]: I1213 03:33:33.788419 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-edpm-ipam" Dec 13 03:33:33 crc kubenswrapper[5070]: I1213 03:33:33.811804 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-578b8d767c-qpqfd"] Dec 13 03:33:33 crc kubenswrapper[5070]: I1213 03:33:33.945898 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/14f47cf4-0a1c-4135-9052-e3cf357d9580-ovsdbserver-nb\") pod \"dnsmasq-dns-578b8d767c-qpqfd\" (UID: \"14f47cf4-0a1c-4135-9052-e3cf357d9580\") " pod="openstack/dnsmasq-dns-578b8d767c-qpqfd" Dec 13 03:33:33 crc kubenswrapper[5070]: I1213 03:33:33.946009 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/14f47cf4-0a1c-4135-9052-e3cf357d9580-dns-svc\") pod \"dnsmasq-dns-578b8d767c-qpqfd\" (UID: \"14f47cf4-0a1c-4135-9052-e3cf357d9580\") " pod="openstack/dnsmasq-dns-578b8d767c-qpqfd" Dec 13 03:33:33 crc kubenswrapper[5070]: I1213 03:33:33.946030 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z884f\" (UniqueName: \"kubernetes.io/projected/14f47cf4-0a1c-4135-9052-e3cf357d9580-kube-api-access-z884f\") pod \"dnsmasq-dns-578b8d767c-qpqfd\" (UID: \"14f47cf4-0a1c-4135-9052-e3cf357d9580\") " pod="openstack/dnsmasq-dns-578b8d767c-qpqfd" Dec 13 03:33:33 crc kubenswrapper[5070]: I1213 03:33:33.946056 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/14f47cf4-0a1c-4135-9052-e3cf357d9580-config\") pod \"dnsmasq-dns-578b8d767c-qpqfd\" (UID: \"14f47cf4-0a1c-4135-9052-e3cf357d9580\") " pod="openstack/dnsmasq-dns-578b8d767c-qpqfd" Dec 13 03:33:33 crc kubenswrapper[5070]: I1213 03:33:33.946083 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/14f47cf4-0a1c-4135-9052-e3cf357d9580-ovsdbserver-sb\") pod \"dnsmasq-dns-578b8d767c-qpqfd\" (UID: \"14f47cf4-0a1c-4135-9052-e3cf357d9580\") " pod="openstack/dnsmasq-dns-578b8d767c-qpqfd" Dec 13 03:33:33 crc kubenswrapper[5070]: I1213 03:33:33.946103 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/14f47cf4-0a1c-4135-9052-e3cf357d9580-openstack-edpm-ipam\") pod \"dnsmasq-dns-578b8d767c-qpqfd\" (UID: \"14f47cf4-0a1c-4135-9052-e3cf357d9580\") " pod="openstack/dnsmasq-dns-578b8d767c-qpqfd" Dec 13 03:33:34 crc kubenswrapper[5070]: I1213 03:33:34.047375 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/14f47cf4-0a1c-4135-9052-e3cf357d9580-ovsdbserver-sb\") pod \"dnsmasq-dns-578b8d767c-qpqfd\" (UID: \"14f47cf4-0a1c-4135-9052-e3cf357d9580\") " pod="openstack/dnsmasq-dns-578b8d767c-qpqfd" Dec 13 03:33:34 crc kubenswrapper[5070]: I1213 03:33:34.047414 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/14f47cf4-0a1c-4135-9052-e3cf357d9580-openstack-edpm-ipam\") pod \"dnsmasq-dns-578b8d767c-qpqfd\" (UID: 
\"14f47cf4-0a1c-4135-9052-e3cf357d9580\") " pod="openstack/dnsmasq-dns-578b8d767c-qpqfd" Dec 13 03:33:34 crc kubenswrapper[5070]: I1213 03:33:34.047518 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/14f47cf4-0a1c-4135-9052-e3cf357d9580-ovsdbserver-nb\") pod \"dnsmasq-dns-578b8d767c-qpqfd\" (UID: \"14f47cf4-0a1c-4135-9052-e3cf357d9580\") " pod="openstack/dnsmasq-dns-578b8d767c-qpqfd" Dec 13 03:33:34 crc kubenswrapper[5070]: I1213 03:33:34.047594 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/14f47cf4-0a1c-4135-9052-e3cf357d9580-dns-svc\") pod \"dnsmasq-dns-578b8d767c-qpqfd\" (UID: \"14f47cf4-0a1c-4135-9052-e3cf357d9580\") " pod="openstack/dnsmasq-dns-578b8d767c-qpqfd" Dec 13 03:33:34 crc kubenswrapper[5070]: I1213 03:33:34.047611 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z884f\" (UniqueName: \"kubernetes.io/projected/14f47cf4-0a1c-4135-9052-e3cf357d9580-kube-api-access-z884f\") pod \"dnsmasq-dns-578b8d767c-qpqfd\" (UID: \"14f47cf4-0a1c-4135-9052-e3cf357d9580\") " pod="openstack/dnsmasq-dns-578b8d767c-qpqfd" Dec 13 03:33:34 crc kubenswrapper[5070]: I1213 03:33:34.047640 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/14f47cf4-0a1c-4135-9052-e3cf357d9580-config\") pod \"dnsmasq-dns-578b8d767c-qpqfd\" (UID: \"14f47cf4-0a1c-4135-9052-e3cf357d9580\") " pod="openstack/dnsmasq-dns-578b8d767c-qpqfd" Dec 13 03:33:34 crc kubenswrapper[5070]: I1213 03:33:34.048626 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/14f47cf4-0a1c-4135-9052-e3cf357d9580-ovsdbserver-nb\") pod \"dnsmasq-dns-578b8d767c-qpqfd\" (UID: \"14f47cf4-0a1c-4135-9052-e3cf357d9580\") " pod="openstack/dnsmasq-dns-578b8d767c-qpqfd" Dec 13 03:33:34 crc kubenswrapper[5070]: I1213 03:33:34.048628 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/14f47cf4-0a1c-4135-9052-e3cf357d9580-config\") pod \"dnsmasq-dns-578b8d767c-qpqfd\" (UID: \"14f47cf4-0a1c-4135-9052-e3cf357d9580\") " pod="openstack/dnsmasq-dns-578b8d767c-qpqfd" Dec 13 03:33:34 crc kubenswrapper[5070]: I1213 03:33:34.048665 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/14f47cf4-0a1c-4135-9052-e3cf357d9580-openstack-edpm-ipam\") pod \"dnsmasq-dns-578b8d767c-qpqfd\" (UID: \"14f47cf4-0a1c-4135-9052-e3cf357d9580\") " pod="openstack/dnsmasq-dns-578b8d767c-qpqfd" Dec 13 03:33:34 crc kubenswrapper[5070]: I1213 03:33:34.048734 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/14f47cf4-0a1c-4135-9052-e3cf357d9580-ovsdbserver-sb\") pod \"dnsmasq-dns-578b8d767c-qpqfd\" (UID: \"14f47cf4-0a1c-4135-9052-e3cf357d9580\") " pod="openstack/dnsmasq-dns-578b8d767c-qpqfd" Dec 13 03:33:34 crc kubenswrapper[5070]: I1213 03:33:34.049103 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/14f47cf4-0a1c-4135-9052-e3cf357d9580-dns-svc\") pod \"dnsmasq-dns-578b8d767c-qpqfd\" (UID: \"14f47cf4-0a1c-4135-9052-e3cf357d9580\") " pod="openstack/dnsmasq-dns-578b8d767c-qpqfd" Dec 13 03:33:34 crc kubenswrapper[5070]: 
I1213 03:33:34.070651 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z884f\" (UniqueName: \"kubernetes.io/projected/14f47cf4-0a1c-4135-9052-e3cf357d9580-kube-api-access-z884f\") pod \"dnsmasq-dns-578b8d767c-qpqfd\" (UID: \"14f47cf4-0a1c-4135-9052-e3cf357d9580\") " pod="openstack/dnsmasq-dns-578b8d767c-qpqfd" Dec 13 03:33:34 crc kubenswrapper[5070]: I1213 03:33:34.112296 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-578b8d767c-qpqfd" Dec 13 03:33:34 crc kubenswrapper[5070]: I1213 03:33:34.617289 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-578b8d767c-qpqfd"] Dec 13 03:33:34 crc kubenswrapper[5070]: I1213 03:33:34.776293 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-578b8d767c-qpqfd" event={"ID":"14f47cf4-0a1c-4135-9052-e3cf357d9580","Type":"ContainerStarted","Data":"d85e6671976927c6728d35cc6adafb2367b6a1cd685d647188a1dc595ec48be6"} Dec 13 03:33:35 crc kubenswrapper[5070]: I1213 03:33:35.784483 5070 generic.go:334] "Generic (PLEG): container finished" podID="14f47cf4-0a1c-4135-9052-e3cf357d9580" containerID="04f4cd0c5e253f0c62065cedf9d1dc0098dfe69f9b34f9b0812d65edfa6548ff" exitCode=0 Dec 13 03:33:35 crc kubenswrapper[5070]: I1213 03:33:35.784599 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-578b8d767c-qpqfd" event={"ID":"14f47cf4-0a1c-4135-9052-e3cf357d9580","Type":"ContainerDied","Data":"04f4cd0c5e253f0c62065cedf9d1dc0098dfe69f9b34f9b0812d65edfa6548ff"} Dec 13 03:33:36 crc kubenswrapper[5070]: I1213 03:33:36.796603 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-578b8d767c-qpqfd" event={"ID":"14f47cf4-0a1c-4135-9052-e3cf357d9580","Type":"ContainerStarted","Data":"24d2835d471fa9a0f7988f651104ce7863436ef6af25c73122eb4a356c97db01"} Dec 13 03:33:36 crc kubenswrapper[5070]: I1213 03:33:36.797157 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-578b8d767c-qpqfd" Dec 13 03:33:36 crc kubenswrapper[5070]: I1213 03:33:36.822114 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-578b8d767c-qpqfd" podStartSLOduration=3.822092894 podStartE2EDuration="3.822092894s" podCreationTimestamp="2025-12-13 03:33:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 03:33:36.816628334 +0000 UTC m=+1309.052471900" watchObservedRunningTime="2025-12-13 03:33:36.822092894 +0000 UTC m=+1309.057936440" Dec 13 03:33:44 crc kubenswrapper[5070]: I1213 03:33:44.113610 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-578b8d767c-qpqfd" Dec 13 03:33:44 crc kubenswrapper[5070]: I1213 03:33:44.187853 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-68d4b6d797-5qpfb"] Dec 13 03:33:44 crc kubenswrapper[5070]: I1213 03:33:44.188148 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-68d4b6d797-5qpfb" podUID="c0887592-9adc-49d0-b351-f3d22bfb2ca2" containerName="dnsmasq-dns" containerID="cri-o://cdb0bddb9ef22d3c66c62bc10f6ed1929b3bef20dd6a293155693047b8356e49" gracePeriod=10 Dec 13 03:33:44 crc kubenswrapper[5070]: I1213 03:33:44.425318 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-fbc59fbb7-6vl2v"] Dec 13 03:33:44 crc kubenswrapper[5070]: 
I1213 03:33:44.427520 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-fbc59fbb7-6vl2v" Dec 13 03:33:44 crc kubenswrapper[5070]: I1213 03:33:44.447259 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-fbc59fbb7-6vl2v"] Dec 13 03:33:44 crc kubenswrapper[5070]: I1213 03:33:44.471134 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ffe09009-9b3f-4256-b12a-a4ee47c59d45-dns-svc\") pod \"dnsmasq-dns-fbc59fbb7-6vl2v\" (UID: \"ffe09009-9b3f-4256-b12a-a4ee47c59d45\") " pod="openstack/dnsmasq-dns-fbc59fbb7-6vl2v" Dec 13 03:33:44 crc kubenswrapper[5070]: I1213 03:33:44.471243 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ffe09009-9b3f-4256-b12a-a4ee47c59d45-config\") pod \"dnsmasq-dns-fbc59fbb7-6vl2v\" (UID: \"ffe09009-9b3f-4256-b12a-a4ee47c59d45\") " pod="openstack/dnsmasq-dns-fbc59fbb7-6vl2v" Dec 13 03:33:44 crc kubenswrapper[5070]: I1213 03:33:44.471265 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/ffe09009-9b3f-4256-b12a-a4ee47c59d45-openstack-edpm-ipam\") pod \"dnsmasq-dns-fbc59fbb7-6vl2v\" (UID: \"ffe09009-9b3f-4256-b12a-a4ee47c59d45\") " pod="openstack/dnsmasq-dns-fbc59fbb7-6vl2v" Dec 13 03:33:44 crc kubenswrapper[5070]: I1213 03:33:44.471285 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ffe09009-9b3f-4256-b12a-a4ee47c59d45-ovsdbserver-sb\") pod \"dnsmasq-dns-fbc59fbb7-6vl2v\" (UID: \"ffe09009-9b3f-4256-b12a-a4ee47c59d45\") " pod="openstack/dnsmasq-dns-fbc59fbb7-6vl2v" Dec 13 03:33:44 crc kubenswrapper[5070]: I1213 03:33:44.471303 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ffe09009-9b3f-4256-b12a-a4ee47c59d45-ovsdbserver-nb\") pod \"dnsmasq-dns-fbc59fbb7-6vl2v\" (UID: \"ffe09009-9b3f-4256-b12a-a4ee47c59d45\") " pod="openstack/dnsmasq-dns-fbc59fbb7-6vl2v" Dec 13 03:33:44 crc kubenswrapper[5070]: I1213 03:33:44.471331 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pxhph\" (UniqueName: \"kubernetes.io/projected/ffe09009-9b3f-4256-b12a-a4ee47c59d45-kube-api-access-pxhph\") pod \"dnsmasq-dns-fbc59fbb7-6vl2v\" (UID: \"ffe09009-9b3f-4256-b12a-a4ee47c59d45\") " pod="openstack/dnsmasq-dns-fbc59fbb7-6vl2v" Dec 13 03:33:44 crc kubenswrapper[5070]: I1213 03:33:44.572384 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pxhph\" (UniqueName: \"kubernetes.io/projected/ffe09009-9b3f-4256-b12a-a4ee47c59d45-kube-api-access-pxhph\") pod \"dnsmasq-dns-fbc59fbb7-6vl2v\" (UID: \"ffe09009-9b3f-4256-b12a-a4ee47c59d45\") " pod="openstack/dnsmasq-dns-fbc59fbb7-6vl2v" Dec 13 03:33:44 crc kubenswrapper[5070]: I1213 03:33:44.572511 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ffe09009-9b3f-4256-b12a-a4ee47c59d45-dns-svc\") pod \"dnsmasq-dns-fbc59fbb7-6vl2v\" (UID: \"ffe09009-9b3f-4256-b12a-a4ee47c59d45\") " pod="openstack/dnsmasq-dns-fbc59fbb7-6vl2v" Dec 13 03:33:44 crc kubenswrapper[5070]: 
I1213 03:33:44.572606 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ffe09009-9b3f-4256-b12a-a4ee47c59d45-config\") pod \"dnsmasq-dns-fbc59fbb7-6vl2v\" (UID: \"ffe09009-9b3f-4256-b12a-a4ee47c59d45\") " pod="openstack/dnsmasq-dns-fbc59fbb7-6vl2v" Dec 13 03:33:44 crc kubenswrapper[5070]: I1213 03:33:44.572629 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/ffe09009-9b3f-4256-b12a-a4ee47c59d45-openstack-edpm-ipam\") pod \"dnsmasq-dns-fbc59fbb7-6vl2v\" (UID: \"ffe09009-9b3f-4256-b12a-a4ee47c59d45\") " pod="openstack/dnsmasq-dns-fbc59fbb7-6vl2v" Dec 13 03:33:44 crc kubenswrapper[5070]: I1213 03:33:44.572656 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ffe09009-9b3f-4256-b12a-a4ee47c59d45-ovsdbserver-sb\") pod \"dnsmasq-dns-fbc59fbb7-6vl2v\" (UID: \"ffe09009-9b3f-4256-b12a-a4ee47c59d45\") " pod="openstack/dnsmasq-dns-fbc59fbb7-6vl2v" Dec 13 03:33:44 crc kubenswrapper[5070]: I1213 03:33:44.572675 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ffe09009-9b3f-4256-b12a-a4ee47c59d45-ovsdbserver-nb\") pod \"dnsmasq-dns-fbc59fbb7-6vl2v\" (UID: \"ffe09009-9b3f-4256-b12a-a4ee47c59d45\") " pod="openstack/dnsmasq-dns-fbc59fbb7-6vl2v" Dec 13 03:33:44 crc kubenswrapper[5070]: I1213 03:33:44.573673 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ffe09009-9b3f-4256-b12a-a4ee47c59d45-ovsdbserver-nb\") pod \"dnsmasq-dns-fbc59fbb7-6vl2v\" (UID: \"ffe09009-9b3f-4256-b12a-a4ee47c59d45\") " pod="openstack/dnsmasq-dns-fbc59fbb7-6vl2v" Dec 13 03:33:44 crc kubenswrapper[5070]: I1213 03:33:44.573685 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ffe09009-9b3f-4256-b12a-a4ee47c59d45-dns-svc\") pod \"dnsmasq-dns-fbc59fbb7-6vl2v\" (UID: \"ffe09009-9b3f-4256-b12a-a4ee47c59d45\") " pod="openstack/dnsmasq-dns-fbc59fbb7-6vl2v" Dec 13 03:33:44 crc kubenswrapper[5070]: I1213 03:33:44.573987 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/ffe09009-9b3f-4256-b12a-a4ee47c59d45-openstack-edpm-ipam\") pod \"dnsmasq-dns-fbc59fbb7-6vl2v\" (UID: \"ffe09009-9b3f-4256-b12a-a4ee47c59d45\") " pod="openstack/dnsmasq-dns-fbc59fbb7-6vl2v" Dec 13 03:33:44 crc kubenswrapper[5070]: I1213 03:33:44.574257 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ffe09009-9b3f-4256-b12a-a4ee47c59d45-ovsdbserver-sb\") pod \"dnsmasq-dns-fbc59fbb7-6vl2v\" (UID: \"ffe09009-9b3f-4256-b12a-a4ee47c59d45\") " pod="openstack/dnsmasq-dns-fbc59fbb7-6vl2v" Dec 13 03:33:44 crc kubenswrapper[5070]: I1213 03:33:44.574709 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ffe09009-9b3f-4256-b12a-a4ee47c59d45-config\") pod \"dnsmasq-dns-fbc59fbb7-6vl2v\" (UID: \"ffe09009-9b3f-4256-b12a-a4ee47c59d45\") " pod="openstack/dnsmasq-dns-fbc59fbb7-6vl2v" Dec 13 03:33:44 crc kubenswrapper[5070]: I1213 03:33:44.604669 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pxhph\" (UniqueName: 
\"kubernetes.io/projected/ffe09009-9b3f-4256-b12a-a4ee47c59d45-kube-api-access-pxhph\") pod \"dnsmasq-dns-fbc59fbb7-6vl2v\" (UID: \"ffe09009-9b3f-4256-b12a-a4ee47c59d45\") " pod="openstack/dnsmasq-dns-fbc59fbb7-6vl2v" Dec 13 03:33:44 crc kubenswrapper[5070]: I1213 03:33:44.759721 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-fbc59fbb7-6vl2v" Dec 13 03:33:44 crc kubenswrapper[5070]: I1213 03:33:44.775829 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-68d4b6d797-5qpfb" Dec 13 03:33:44 crc kubenswrapper[5070]: I1213 03:33:44.780203 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c0887592-9adc-49d0-b351-f3d22bfb2ca2-ovsdbserver-nb\") pod \"c0887592-9adc-49d0-b351-f3d22bfb2ca2\" (UID: \"c0887592-9adc-49d0-b351-f3d22bfb2ca2\") " Dec 13 03:33:44 crc kubenswrapper[5070]: I1213 03:33:44.780330 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c0887592-9adc-49d0-b351-f3d22bfb2ca2-ovsdbserver-sb\") pod \"c0887592-9adc-49d0-b351-f3d22bfb2ca2\" (UID: \"c0887592-9adc-49d0-b351-f3d22bfb2ca2\") " Dec 13 03:33:44 crc kubenswrapper[5070]: I1213 03:33:44.780573 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f9qvw\" (UniqueName: \"kubernetes.io/projected/c0887592-9adc-49d0-b351-f3d22bfb2ca2-kube-api-access-f9qvw\") pod \"c0887592-9adc-49d0-b351-f3d22bfb2ca2\" (UID: \"c0887592-9adc-49d0-b351-f3d22bfb2ca2\") " Dec 13 03:33:44 crc kubenswrapper[5070]: I1213 03:33:44.780636 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c0887592-9adc-49d0-b351-f3d22bfb2ca2-config\") pod \"c0887592-9adc-49d0-b351-f3d22bfb2ca2\" (UID: \"c0887592-9adc-49d0-b351-f3d22bfb2ca2\") " Dec 13 03:33:44 crc kubenswrapper[5070]: I1213 03:33:44.780701 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c0887592-9adc-49d0-b351-f3d22bfb2ca2-dns-svc\") pod \"c0887592-9adc-49d0-b351-f3d22bfb2ca2\" (UID: \"c0887592-9adc-49d0-b351-f3d22bfb2ca2\") " Dec 13 03:33:44 crc kubenswrapper[5070]: I1213 03:33:44.787967 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c0887592-9adc-49d0-b351-f3d22bfb2ca2-kube-api-access-f9qvw" (OuterVolumeSpecName: "kube-api-access-f9qvw") pod "c0887592-9adc-49d0-b351-f3d22bfb2ca2" (UID: "c0887592-9adc-49d0-b351-f3d22bfb2ca2"). InnerVolumeSpecName "kube-api-access-f9qvw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:33:44 crc kubenswrapper[5070]: I1213 03:33:44.842182 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c0887592-9adc-49d0-b351-f3d22bfb2ca2-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "c0887592-9adc-49d0-b351-f3d22bfb2ca2" (UID: "c0887592-9adc-49d0-b351-f3d22bfb2ca2"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:33:44 crc kubenswrapper[5070]: I1213 03:33:44.855529 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c0887592-9adc-49d0-b351-f3d22bfb2ca2-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "c0887592-9adc-49d0-b351-f3d22bfb2ca2" (UID: "c0887592-9adc-49d0-b351-f3d22bfb2ca2"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:33:44 crc kubenswrapper[5070]: I1213 03:33:44.865264 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c0887592-9adc-49d0-b351-f3d22bfb2ca2-config" (OuterVolumeSpecName: "config") pod "c0887592-9adc-49d0-b351-f3d22bfb2ca2" (UID: "c0887592-9adc-49d0-b351-f3d22bfb2ca2"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:33:44 crc kubenswrapper[5070]: I1213 03:33:44.885884 5070 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c0887592-9adc-49d0-b351-f3d22bfb2ca2-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 13 03:33:44 crc kubenswrapper[5070]: I1213 03:33:44.885907 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f9qvw\" (UniqueName: \"kubernetes.io/projected/c0887592-9adc-49d0-b351-f3d22bfb2ca2-kube-api-access-f9qvw\") on node \"crc\" DevicePath \"\"" Dec 13 03:33:44 crc kubenswrapper[5070]: I1213 03:33:44.885918 5070 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c0887592-9adc-49d0-b351-f3d22bfb2ca2-config\") on node \"crc\" DevicePath \"\"" Dec 13 03:33:44 crc kubenswrapper[5070]: I1213 03:33:44.885926 5070 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c0887592-9adc-49d0-b351-f3d22bfb2ca2-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 13 03:33:44 crc kubenswrapper[5070]: I1213 03:33:44.891867 5070 generic.go:334] "Generic (PLEG): container finished" podID="c0887592-9adc-49d0-b351-f3d22bfb2ca2" containerID="cdb0bddb9ef22d3c66c62bc10f6ed1929b3bef20dd6a293155693047b8356e49" exitCode=0 Dec 13 03:33:44 crc kubenswrapper[5070]: I1213 03:33:44.891901 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-68d4b6d797-5qpfb" event={"ID":"c0887592-9adc-49d0-b351-f3d22bfb2ca2","Type":"ContainerDied","Data":"cdb0bddb9ef22d3c66c62bc10f6ed1929b3bef20dd6a293155693047b8356e49"} Dec 13 03:33:44 crc kubenswrapper[5070]: I1213 03:33:44.891925 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-68d4b6d797-5qpfb" event={"ID":"c0887592-9adc-49d0-b351-f3d22bfb2ca2","Type":"ContainerDied","Data":"004a2d6f4dd77f7ec3e3162e501d42c8b6b1be8f3cd8fa622449cb2a00b581be"} Dec 13 03:33:44 crc kubenswrapper[5070]: I1213 03:33:44.891943 5070 scope.go:117] "RemoveContainer" containerID="cdb0bddb9ef22d3c66c62bc10f6ed1929b3bef20dd6a293155693047b8356e49" Dec 13 03:33:44 crc kubenswrapper[5070]: I1213 03:33:44.892074 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-68d4b6d797-5qpfb" Dec 13 03:33:44 crc kubenswrapper[5070]: I1213 03:33:44.909421 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c0887592-9adc-49d0-b351-f3d22bfb2ca2-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "c0887592-9adc-49d0-b351-f3d22bfb2ca2" (UID: "c0887592-9adc-49d0-b351-f3d22bfb2ca2"). 
InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:33:44 crc kubenswrapper[5070]: I1213 03:33:44.929489 5070 scope.go:117] "RemoveContainer" containerID="c3eab0715f1879257c2f8ccc7cd2c354a4fc7a7738a67ce251e816ad755e5b76" Dec 13 03:33:44 crc kubenswrapper[5070]: I1213 03:33:44.948669 5070 scope.go:117] "RemoveContainer" containerID="cdb0bddb9ef22d3c66c62bc10f6ed1929b3bef20dd6a293155693047b8356e49" Dec 13 03:33:44 crc kubenswrapper[5070]: E1213 03:33:44.950549 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cdb0bddb9ef22d3c66c62bc10f6ed1929b3bef20dd6a293155693047b8356e49\": container with ID starting with cdb0bddb9ef22d3c66c62bc10f6ed1929b3bef20dd6a293155693047b8356e49 not found: ID does not exist" containerID="cdb0bddb9ef22d3c66c62bc10f6ed1929b3bef20dd6a293155693047b8356e49" Dec 13 03:33:44 crc kubenswrapper[5070]: I1213 03:33:44.950586 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cdb0bddb9ef22d3c66c62bc10f6ed1929b3bef20dd6a293155693047b8356e49"} err="failed to get container status \"cdb0bddb9ef22d3c66c62bc10f6ed1929b3bef20dd6a293155693047b8356e49\": rpc error: code = NotFound desc = could not find container \"cdb0bddb9ef22d3c66c62bc10f6ed1929b3bef20dd6a293155693047b8356e49\": container with ID starting with cdb0bddb9ef22d3c66c62bc10f6ed1929b3bef20dd6a293155693047b8356e49 not found: ID does not exist" Dec 13 03:33:44 crc kubenswrapper[5070]: I1213 03:33:44.950609 5070 scope.go:117] "RemoveContainer" containerID="c3eab0715f1879257c2f8ccc7cd2c354a4fc7a7738a67ce251e816ad755e5b76" Dec 13 03:33:44 crc kubenswrapper[5070]: E1213 03:33:44.950924 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c3eab0715f1879257c2f8ccc7cd2c354a4fc7a7738a67ce251e816ad755e5b76\": container with ID starting with c3eab0715f1879257c2f8ccc7cd2c354a4fc7a7738a67ce251e816ad755e5b76 not found: ID does not exist" containerID="c3eab0715f1879257c2f8ccc7cd2c354a4fc7a7738a67ce251e816ad755e5b76" Dec 13 03:33:44 crc kubenswrapper[5070]: I1213 03:33:44.950948 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c3eab0715f1879257c2f8ccc7cd2c354a4fc7a7738a67ce251e816ad755e5b76"} err="failed to get container status \"c3eab0715f1879257c2f8ccc7cd2c354a4fc7a7738a67ce251e816ad755e5b76\": rpc error: code = NotFound desc = could not find container \"c3eab0715f1879257c2f8ccc7cd2c354a4fc7a7738a67ce251e816ad755e5b76\": container with ID starting with c3eab0715f1879257c2f8ccc7cd2c354a4fc7a7738a67ce251e816ad755e5b76 not found: ID does not exist" Dec 13 03:33:44 crc kubenswrapper[5070]: I1213 03:33:44.987683 5070 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c0887592-9adc-49d0-b351-f3d22bfb2ca2-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 13 03:33:45 crc kubenswrapper[5070]: I1213 03:33:45.233858 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-68d4b6d797-5qpfb"] Dec 13 03:33:45 crc kubenswrapper[5070]: I1213 03:33:45.244702 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-68d4b6d797-5qpfb"] Dec 13 03:33:45 crc kubenswrapper[5070]: I1213 03:33:45.269024 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-fbc59fbb7-6vl2v"] Dec 13 03:33:45 crc kubenswrapper[5070]: I1213 
03:33:45.903951 5070 generic.go:334] "Generic (PLEG): container finished" podID="ffe09009-9b3f-4256-b12a-a4ee47c59d45" containerID="c6e92bdbe59f8cc22c2604afa0651de362556fb5bfb649945a2d2706d0012863" exitCode=0 Dec 13 03:33:45 crc kubenswrapper[5070]: I1213 03:33:45.903994 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-fbc59fbb7-6vl2v" event={"ID":"ffe09009-9b3f-4256-b12a-a4ee47c59d45","Type":"ContainerDied","Data":"c6e92bdbe59f8cc22c2604afa0651de362556fb5bfb649945a2d2706d0012863"} Dec 13 03:33:45 crc kubenswrapper[5070]: I1213 03:33:45.904020 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-fbc59fbb7-6vl2v" event={"ID":"ffe09009-9b3f-4256-b12a-a4ee47c59d45","Type":"ContainerStarted","Data":"f7a3a310aa7d628623df20e8a54fcca2f75cd7fa21c518a2aa5bd9b23ef2ae15"} Dec 13 03:33:46 crc kubenswrapper[5070]: I1213 03:33:46.183096 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c0887592-9adc-49d0-b351-f3d22bfb2ca2" path="/var/lib/kubelet/pods/c0887592-9adc-49d0-b351-f3d22bfb2ca2/volumes" Dec 13 03:33:46 crc kubenswrapper[5070]: I1213 03:33:46.914222 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-fbc59fbb7-6vl2v" event={"ID":"ffe09009-9b3f-4256-b12a-a4ee47c59d45","Type":"ContainerStarted","Data":"af437229b7604d90d849225a5a7cd682b4b13b89ae88322aaca5288dc4ab6fd0"} Dec 13 03:33:46 crc kubenswrapper[5070]: I1213 03:33:46.914660 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-fbc59fbb7-6vl2v" Dec 13 03:33:46 crc kubenswrapper[5070]: I1213 03:33:46.936127 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-fbc59fbb7-6vl2v" podStartSLOduration=2.9360813500000003 podStartE2EDuration="2.93608135s" podCreationTimestamp="2025-12-13 03:33:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 03:33:46.933697976 +0000 UTC m=+1319.169541522" watchObservedRunningTime="2025-12-13 03:33:46.93608135 +0000 UTC m=+1319.171924896" Dec 13 03:33:54 crc kubenswrapper[5070]: I1213 03:33:54.762236 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-fbc59fbb7-6vl2v" Dec 13 03:33:54 crc kubenswrapper[5070]: I1213 03:33:54.844574 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-578b8d767c-qpqfd"] Dec 13 03:33:54 crc kubenswrapper[5070]: I1213 03:33:54.844843 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-578b8d767c-qpqfd" podUID="14f47cf4-0a1c-4135-9052-e3cf357d9580" containerName="dnsmasq-dns" containerID="cri-o://24d2835d471fa9a0f7988f651104ce7863436ef6af25c73122eb4a356c97db01" gracePeriod=10 Dec 13 03:33:55 crc kubenswrapper[5070]: I1213 03:33:55.002537 5070 generic.go:334] "Generic (PLEG): container finished" podID="14f47cf4-0a1c-4135-9052-e3cf357d9580" containerID="24d2835d471fa9a0f7988f651104ce7863436ef6af25c73122eb4a356c97db01" exitCode=0 Dec 13 03:33:55 crc kubenswrapper[5070]: I1213 03:33:55.002611 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-578b8d767c-qpqfd" event={"ID":"14f47cf4-0a1c-4135-9052-e3cf357d9580","Type":"ContainerDied","Data":"24d2835d471fa9a0f7988f651104ce7863436ef6af25c73122eb4a356c97db01"} Dec 13 03:33:55 crc kubenswrapper[5070]: E1213 03:33:55.052050 5070 cadvisor_stats_provider.go:516] "Partial failure issuing 
cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod14f47cf4_0a1c_4135_9052_e3cf357d9580.slice/crio-conmon-24d2835d471fa9a0f7988f651104ce7863436ef6af25c73122eb4a356c97db01.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod14f47cf4_0a1c_4135_9052_e3cf357d9580.slice/crio-24d2835d471fa9a0f7988f651104ce7863436ef6af25c73122eb4a356c97db01.scope\": RecentStats: unable to find data in memory cache]" Dec 13 03:33:55 crc kubenswrapper[5070]: I1213 03:33:55.311786 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-578b8d767c-qpqfd" Dec 13 03:33:55 crc kubenswrapper[5070]: I1213 03:33:55.440076 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/14f47cf4-0a1c-4135-9052-e3cf357d9580-openstack-edpm-ipam\") pod \"14f47cf4-0a1c-4135-9052-e3cf357d9580\" (UID: \"14f47cf4-0a1c-4135-9052-e3cf357d9580\") " Dec 13 03:33:55 crc kubenswrapper[5070]: I1213 03:33:55.440208 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z884f\" (UniqueName: \"kubernetes.io/projected/14f47cf4-0a1c-4135-9052-e3cf357d9580-kube-api-access-z884f\") pod \"14f47cf4-0a1c-4135-9052-e3cf357d9580\" (UID: \"14f47cf4-0a1c-4135-9052-e3cf357d9580\") " Dec 13 03:33:55 crc kubenswrapper[5070]: I1213 03:33:55.440248 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/14f47cf4-0a1c-4135-9052-e3cf357d9580-ovsdbserver-nb\") pod \"14f47cf4-0a1c-4135-9052-e3cf357d9580\" (UID: \"14f47cf4-0a1c-4135-9052-e3cf357d9580\") " Dec 13 03:33:55 crc kubenswrapper[5070]: I1213 03:33:55.440273 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/14f47cf4-0a1c-4135-9052-e3cf357d9580-config\") pod \"14f47cf4-0a1c-4135-9052-e3cf357d9580\" (UID: \"14f47cf4-0a1c-4135-9052-e3cf357d9580\") " Dec 13 03:33:55 crc kubenswrapper[5070]: I1213 03:33:55.440296 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/14f47cf4-0a1c-4135-9052-e3cf357d9580-ovsdbserver-sb\") pod \"14f47cf4-0a1c-4135-9052-e3cf357d9580\" (UID: \"14f47cf4-0a1c-4135-9052-e3cf357d9580\") " Dec 13 03:33:55 crc kubenswrapper[5070]: I1213 03:33:55.440351 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/14f47cf4-0a1c-4135-9052-e3cf357d9580-dns-svc\") pod \"14f47cf4-0a1c-4135-9052-e3cf357d9580\" (UID: \"14f47cf4-0a1c-4135-9052-e3cf357d9580\") " Dec 13 03:33:55 crc kubenswrapper[5070]: I1213 03:33:55.453729 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/14f47cf4-0a1c-4135-9052-e3cf357d9580-kube-api-access-z884f" (OuterVolumeSpecName: "kube-api-access-z884f") pod "14f47cf4-0a1c-4135-9052-e3cf357d9580" (UID: "14f47cf4-0a1c-4135-9052-e3cf357d9580"). InnerVolumeSpecName "kube-api-access-z884f". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:33:55 crc kubenswrapper[5070]: I1213 03:33:55.486319 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/14f47cf4-0a1c-4135-9052-e3cf357d9580-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "14f47cf4-0a1c-4135-9052-e3cf357d9580" (UID: "14f47cf4-0a1c-4135-9052-e3cf357d9580"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:33:55 crc kubenswrapper[5070]: I1213 03:33:55.489853 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/14f47cf4-0a1c-4135-9052-e3cf357d9580-config" (OuterVolumeSpecName: "config") pod "14f47cf4-0a1c-4135-9052-e3cf357d9580" (UID: "14f47cf4-0a1c-4135-9052-e3cf357d9580"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:33:55 crc kubenswrapper[5070]: I1213 03:33:55.498053 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/14f47cf4-0a1c-4135-9052-e3cf357d9580-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "14f47cf4-0a1c-4135-9052-e3cf357d9580" (UID: "14f47cf4-0a1c-4135-9052-e3cf357d9580"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:33:55 crc kubenswrapper[5070]: I1213 03:33:55.502665 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/14f47cf4-0a1c-4135-9052-e3cf357d9580-openstack-edpm-ipam" (OuterVolumeSpecName: "openstack-edpm-ipam") pod "14f47cf4-0a1c-4135-9052-e3cf357d9580" (UID: "14f47cf4-0a1c-4135-9052-e3cf357d9580"). InnerVolumeSpecName "openstack-edpm-ipam". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:33:55 crc kubenswrapper[5070]: I1213 03:33:55.514438 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/14f47cf4-0a1c-4135-9052-e3cf357d9580-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "14f47cf4-0a1c-4135-9052-e3cf357d9580" (UID: "14f47cf4-0a1c-4135-9052-e3cf357d9580"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:33:55 crc kubenswrapper[5070]: I1213 03:33:55.542509 5070 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/14f47cf4-0a1c-4135-9052-e3cf357d9580-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 13 03:33:55 crc kubenswrapper[5070]: I1213 03:33:55.542621 5070 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/14f47cf4-0a1c-4135-9052-e3cf357d9580-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 13 03:33:55 crc kubenswrapper[5070]: I1213 03:33:55.542633 5070 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/14f47cf4-0a1c-4135-9052-e3cf357d9580-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Dec 13 03:33:55 crc kubenswrapper[5070]: I1213 03:33:55.542644 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z884f\" (UniqueName: \"kubernetes.io/projected/14f47cf4-0a1c-4135-9052-e3cf357d9580-kube-api-access-z884f\") on node \"crc\" DevicePath \"\"" Dec 13 03:33:55 crc kubenswrapper[5070]: I1213 03:33:55.542657 5070 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/14f47cf4-0a1c-4135-9052-e3cf357d9580-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 13 03:33:55 crc kubenswrapper[5070]: I1213 03:33:55.542668 5070 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/14f47cf4-0a1c-4135-9052-e3cf357d9580-config\") on node \"crc\" DevicePath \"\"" Dec 13 03:33:56 crc kubenswrapper[5070]: I1213 03:33:56.013840 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-578b8d767c-qpqfd" event={"ID":"14f47cf4-0a1c-4135-9052-e3cf357d9580","Type":"ContainerDied","Data":"d85e6671976927c6728d35cc6adafb2367b6a1cd685d647188a1dc595ec48be6"} Dec 13 03:33:56 crc kubenswrapper[5070]: I1213 03:33:56.014219 5070 scope.go:117] "RemoveContainer" containerID="24d2835d471fa9a0f7988f651104ce7863436ef6af25c73122eb4a356c97db01" Dec 13 03:33:56 crc kubenswrapper[5070]: I1213 03:33:56.014219 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-578b8d767c-qpqfd" Dec 13 03:33:56 crc kubenswrapper[5070]: I1213 03:33:56.045571 5070 scope.go:117] "RemoveContainer" containerID="04f4cd0c5e253f0c62065cedf9d1dc0098dfe69f9b34f9b0812d65edfa6548ff" Dec 13 03:33:56 crc kubenswrapper[5070]: I1213 03:33:56.060637 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-578b8d767c-qpqfd"] Dec 13 03:33:56 crc kubenswrapper[5070]: I1213 03:33:56.068488 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-578b8d767c-qpqfd"] Dec 13 03:33:56 crc kubenswrapper[5070]: I1213 03:33:56.177113 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="14f47cf4-0a1c-4135-9052-e3cf357d9580" path="/var/lib/kubelet/pods/14f47cf4-0a1c-4135-9052-e3cf357d9580/volumes" Dec 13 03:34:03 crc kubenswrapper[5070]: I1213 03:34:03.095743 5070 generic.go:334] "Generic (PLEG): container finished" podID="965e1f2f-3f50-4411-8006-4db60cb5a504" containerID="282fd72811e59357ff5a9edab91054f1508c67580e6a34ff904bbed8b37a7499" exitCode=0 Dec 13 03:34:03 crc kubenswrapper[5070]: I1213 03:34:03.095882 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"965e1f2f-3f50-4411-8006-4db60cb5a504","Type":"ContainerDied","Data":"282fd72811e59357ff5a9edab91054f1508c67580e6a34ff904bbed8b37a7499"} Dec 13 03:34:04 crc kubenswrapper[5070]: I1213 03:34:04.109469 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"965e1f2f-3f50-4411-8006-4db60cb5a504","Type":"ContainerStarted","Data":"f2eb73f17ddc7da38686af1ae15af32df170c8c6efea97da3488819c5236e73b"} Dec 13 03:34:04 crc kubenswrapper[5070]: I1213 03:34:04.110171 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Dec 13 03:34:04 crc kubenswrapper[5070]: I1213 03:34:04.111331 5070 generic.go:334] "Generic (PLEG): container finished" podID="46b708f1-eabc-44f5-8388-8e6b42d66fd0" containerID="2ce4bfdc86dd74f8f61529fba986df028e7efbfdee01497c6203c9377b7db38b" exitCode=0 Dec 13 03:34:04 crc kubenswrapper[5070]: I1213 03:34:04.111358 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"46b708f1-eabc-44f5-8388-8e6b42d66fd0","Type":"ContainerDied","Data":"2ce4bfdc86dd74f8f61529fba986df028e7efbfdee01497c6203c9377b7db38b"} Dec 13 03:34:04 crc kubenswrapper[5070]: I1213 03:34:04.188598 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=37.188574941 podStartE2EDuration="37.188574941s" podCreationTimestamp="2025-12-13 03:33:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 03:34:04.142126848 +0000 UTC m=+1336.377970464" watchObservedRunningTime="2025-12-13 03:34:04.188574941 +0000 UTC m=+1336.424418487" Dec 13 03:34:05 crc kubenswrapper[5070]: I1213 03:34:05.122965 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"46b708f1-eabc-44f5-8388-8e6b42d66fd0","Type":"ContainerStarted","Data":"6fd42eb0c6f1fcd7be38ae7d062df91f9fd6c2b7e6ecd1f3388ae49364802c8a"} Dec 13 03:34:05 crc kubenswrapper[5070]: I1213 03:34:05.123688 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Dec 13 03:34:05 crc kubenswrapper[5070]: I1213 03:34:05.174185 5070 kubelet.go:2421] "SyncLoop 
ADD" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-96kcn"] Dec 13 03:34:05 crc kubenswrapper[5070]: E1213 03:34:05.174636 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c0887592-9adc-49d0-b351-f3d22bfb2ca2" containerName="init" Dec 13 03:34:05 crc kubenswrapper[5070]: I1213 03:34:05.174652 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="c0887592-9adc-49d0-b351-f3d22bfb2ca2" containerName="init" Dec 13 03:34:05 crc kubenswrapper[5070]: E1213 03:34:05.174701 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="14f47cf4-0a1c-4135-9052-e3cf357d9580" containerName="init" Dec 13 03:34:05 crc kubenswrapper[5070]: I1213 03:34:05.174708 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="14f47cf4-0a1c-4135-9052-e3cf357d9580" containerName="init" Dec 13 03:34:05 crc kubenswrapper[5070]: E1213 03:34:05.174729 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="14f47cf4-0a1c-4135-9052-e3cf357d9580" containerName="dnsmasq-dns" Dec 13 03:34:05 crc kubenswrapper[5070]: I1213 03:34:05.174735 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="14f47cf4-0a1c-4135-9052-e3cf357d9580" containerName="dnsmasq-dns" Dec 13 03:34:05 crc kubenswrapper[5070]: E1213 03:34:05.174750 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c0887592-9adc-49d0-b351-f3d22bfb2ca2" containerName="dnsmasq-dns" Dec 13 03:34:05 crc kubenswrapper[5070]: I1213 03:34:05.174756 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="c0887592-9adc-49d0-b351-f3d22bfb2ca2" containerName="dnsmasq-dns" Dec 13 03:34:05 crc kubenswrapper[5070]: I1213 03:34:05.174920 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="14f47cf4-0a1c-4135-9052-e3cf357d9580" containerName="dnsmasq-dns" Dec 13 03:34:05 crc kubenswrapper[5070]: I1213 03:34:05.174938 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="c0887592-9adc-49d0-b351-f3d22bfb2ca2" containerName="dnsmasq-dns" Dec 13 03:34:05 crc kubenswrapper[5070]: I1213 03:34:05.175605 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-96kcn" Dec 13 03:34:05 crc kubenswrapper[5070]: I1213 03:34:05.188323 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=38.188294418 podStartE2EDuration="38.188294418s" podCreationTimestamp="2025-12-13 03:33:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 03:34:05.153081953 +0000 UTC m=+1337.388925539" watchObservedRunningTime="2025-12-13 03:34:05.188294418 +0000 UTC m=+1337.424137984" Dec 13 03:34:05 crc kubenswrapper[5070]: I1213 03:34:05.196389 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 13 03:34:05 crc kubenswrapper[5070]: I1213 03:34:05.196802 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 13 03:34:05 crc kubenswrapper[5070]: I1213 03:34:05.200997 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 13 03:34:05 crc kubenswrapper[5070]: I1213 03:34:05.201252 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-gdkcc" Dec 13 03:34:05 crc kubenswrapper[5070]: I1213 03:34:05.230954 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-96kcn"] Dec 13 03:34:05 crc kubenswrapper[5070]: I1213 03:34:05.285586 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v576v\" (UniqueName: \"kubernetes.io/projected/72639ee2-e2ea-4ca5-ad91-5998c4780ba0-kube-api-access-v576v\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-96kcn\" (UID: \"72639ee2-e2ea-4ca5-ad91-5998c4780ba0\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-96kcn" Dec 13 03:34:05 crc kubenswrapper[5070]: I1213 03:34:05.285741 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/72639ee2-e2ea-4ca5-ad91-5998c4780ba0-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-96kcn\" (UID: \"72639ee2-e2ea-4ca5-ad91-5998c4780ba0\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-96kcn" Dec 13 03:34:05 crc kubenswrapper[5070]: I1213 03:34:05.285793 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/72639ee2-e2ea-4ca5-ad91-5998c4780ba0-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-96kcn\" (UID: \"72639ee2-e2ea-4ca5-ad91-5998c4780ba0\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-96kcn" Dec 13 03:34:05 crc kubenswrapper[5070]: I1213 03:34:05.286005 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/72639ee2-e2ea-4ca5-ad91-5998c4780ba0-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-96kcn\" (UID: \"72639ee2-e2ea-4ca5-ad91-5998c4780ba0\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-96kcn" Dec 13 03:34:05 crc kubenswrapper[5070]: I1213 03:34:05.387839 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-v576v\" (UniqueName: \"kubernetes.io/projected/72639ee2-e2ea-4ca5-ad91-5998c4780ba0-kube-api-access-v576v\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-96kcn\" (UID: \"72639ee2-e2ea-4ca5-ad91-5998c4780ba0\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-96kcn" Dec 13 03:34:05 crc kubenswrapper[5070]: I1213 03:34:05.388489 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/72639ee2-e2ea-4ca5-ad91-5998c4780ba0-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-96kcn\" (UID: \"72639ee2-e2ea-4ca5-ad91-5998c4780ba0\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-96kcn" Dec 13 03:34:05 crc kubenswrapper[5070]: I1213 03:34:05.388624 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/72639ee2-e2ea-4ca5-ad91-5998c4780ba0-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-96kcn\" (UID: \"72639ee2-e2ea-4ca5-ad91-5998c4780ba0\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-96kcn" Dec 13 03:34:05 crc kubenswrapper[5070]: I1213 03:34:05.388797 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/72639ee2-e2ea-4ca5-ad91-5998c4780ba0-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-96kcn\" (UID: \"72639ee2-e2ea-4ca5-ad91-5998c4780ba0\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-96kcn" Dec 13 03:34:05 crc kubenswrapper[5070]: I1213 03:34:05.395980 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/72639ee2-e2ea-4ca5-ad91-5998c4780ba0-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-96kcn\" (UID: \"72639ee2-e2ea-4ca5-ad91-5998c4780ba0\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-96kcn" Dec 13 03:34:05 crc kubenswrapper[5070]: I1213 03:34:05.396066 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/72639ee2-e2ea-4ca5-ad91-5998c4780ba0-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-96kcn\" (UID: \"72639ee2-e2ea-4ca5-ad91-5998c4780ba0\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-96kcn" Dec 13 03:34:05 crc kubenswrapper[5070]: I1213 03:34:05.397939 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/72639ee2-e2ea-4ca5-ad91-5998c4780ba0-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-96kcn\" (UID: \"72639ee2-e2ea-4ca5-ad91-5998c4780ba0\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-96kcn" Dec 13 03:34:05 crc kubenswrapper[5070]: I1213 03:34:05.413063 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v576v\" (UniqueName: \"kubernetes.io/projected/72639ee2-e2ea-4ca5-ad91-5998c4780ba0-kube-api-access-v576v\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-96kcn\" (UID: \"72639ee2-e2ea-4ca5-ad91-5998c4780ba0\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-96kcn" Dec 13 03:34:05 crc kubenswrapper[5070]: I1213 03:34:05.496191 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-96kcn" Dec 13 03:34:06 crc kubenswrapper[5070]: I1213 03:34:06.020612 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-96kcn"] Dec 13 03:34:06 crc kubenswrapper[5070]: W1213 03:34:06.025646 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod72639ee2_e2ea_4ca5_ad91_5998c4780ba0.slice/crio-d16e223823ec2036c219c66a06d3ec8c62318d365b736502e76ef34061870ee1 WatchSource:0}: Error finding container d16e223823ec2036c219c66a06d3ec8c62318d365b736502e76ef34061870ee1: Status 404 returned error can't find the container with id d16e223823ec2036c219c66a06d3ec8c62318d365b736502e76ef34061870ee1 Dec 13 03:34:06 crc kubenswrapper[5070]: I1213 03:34:06.133043 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-96kcn" event={"ID":"72639ee2-e2ea-4ca5-ad91-5998c4780ba0","Type":"ContainerStarted","Data":"d16e223823ec2036c219c66a06d3ec8c62318d365b736502e76ef34061870ee1"} Dec 13 03:34:15 crc kubenswrapper[5070]: I1213 03:34:15.220704 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-96kcn" event={"ID":"72639ee2-e2ea-4ca5-ad91-5998c4780ba0","Type":"ContainerStarted","Data":"56ed47b0d0a6b465d2bcc04f090abcaf04ff5b7cead3fd6691b56ee0e6822908"} Dec 13 03:34:15 crc kubenswrapper[5070]: I1213 03:34:15.238844 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-96kcn" podStartSLOduration=1.8132910039999999 podStartE2EDuration="10.238825637s" podCreationTimestamp="2025-12-13 03:34:05 +0000 UTC" firstStartedPulling="2025-12-13 03:34:06.027069335 +0000 UTC m=+1338.262912881" lastFinishedPulling="2025-12-13 03:34:14.452603968 +0000 UTC m=+1346.688447514" observedRunningTime="2025-12-13 03:34:15.237083298 +0000 UTC m=+1347.472926854" watchObservedRunningTime="2025-12-13 03:34:15.238825637 +0000 UTC m=+1347.474669183" Dec 13 03:34:18 crc kubenswrapper[5070]: I1213 03:34:18.164654 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Dec 13 03:34:18 crc kubenswrapper[5070]: I1213 03:34:18.201678 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Dec 13 03:34:30 crc kubenswrapper[5070]: I1213 03:34:30.387301 5070 generic.go:334] "Generic (PLEG): container finished" podID="72639ee2-e2ea-4ca5-ad91-5998c4780ba0" containerID="56ed47b0d0a6b465d2bcc04f090abcaf04ff5b7cead3fd6691b56ee0e6822908" exitCode=0 Dec 13 03:34:30 crc kubenswrapper[5070]: I1213 03:34:30.387349 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-96kcn" event={"ID":"72639ee2-e2ea-4ca5-ad91-5998c4780ba0","Type":"ContainerDied","Data":"56ed47b0d0a6b465d2bcc04f090abcaf04ff5b7cead3fd6691b56ee0e6822908"} Dec 13 03:34:31 crc kubenswrapper[5070]: I1213 03:34:31.811947 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-96kcn" Dec 13 03:34:31 crc kubenswrapper[5070]: I1213 03:34:31.918794 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/72639ee2-e2ea-4ca5-ad91-5998c4780ba0-repo-setup-combined-ca-bundle\") pod \"72639ee2-e2ea-4ca5-ad91-5998c4780ba0\" (UID: \"72639ee2-e2ea-4ca5-ad91-5998c4780ba0\") " Dec 13 03:34:31 crc kubenswrapper[5070]: I1213 03:34:31.918832 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v576v\" (UniqueName: \"kubernetes.io/projected/72639ee2-e2ea-4ca5-ad91-5998c4780ba0-kube-api-access-v576v\") pod \"72639ee2-e2ea-4ca5-ad91-5998c4780ba0\" (UID: \"72639ee2-e2ea-4ca5-ad91-5998c4780ba0\") " Dec 13 03:34:31 crc kubenswrapper[5070]: I1213 03:34:31.918952 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/72639ee2-e2ea-4ca5-ad91-5998c4780ba0-ssh-key\") pod \"72639ee2-e2ea-4ca5-ad91-5998c4780ba0\" (UID: \"72639ee2-e2ea-4ca5-ad91-5998c4780ba0\") " Dec 13 03:34:31 crc kubenswrapper[5070]: I1213 03:34:31.918983 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/72639ee2-e2ea-4ca5-ad91-5998c4780ba0-inventory\") pod \"72639ee2-e2ea-4ca5-ad91-5998c4780ba0\" (UID: \"72639ee2-e2ea-4ca5-ad91-5998c4780ba0\") " Dec 13 03:34:31 crc kubenswrapper[5070]: I1213 03:34:31.924545 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/72639ee2-e2ea-4ca5-ad91-5998c4780ba0-kube-api-access-v576v" (OuterVolumeSpecName: "kube-api-access-v576v") pod "72639ee2-e2ea-4ca5-ad91-5998c4780ba0" (UID: "72639ee2-e2ea-4ca5-ad91-5998c4780ba0"). InnerVolumeSpecName "kube-api-access-v576v". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:34:31 crc kubenswrapper[5070]: I1213 03:34:31.925527 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/72639ee2-e2ea-4ca5-ad91-5998c4780ba0-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "72639ee2-e2ea-4ca5-ad91-5998c4780ba0" (UID: "72639ee2-e2ea-4ca5-ad91-5998c4780ba0"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:34:31 crc kubenswrapper[5070]: I1213 03:34:31.945420 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/72639ee2-e2ea-4ca5-ad91-5998c4780ba0-inventory" (OuterVolumeSpecName: "inventory") pod "72639ee2-e2ea-4ca5-ad91-5998c4780ba0" (UID: "72639ee2-e2ea-4ca5-ad91-5998c4780ba0"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:34:31 crc kubenswrapper[5070]: I1213 03:34:31.949972 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/72639ee2-e2ea-4ca5-ad91-5998c4780ba0-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "72639ee2-e2ea-4ca5-ad91-5998c4780ba0" (UID: "72639ee2-e2ea-4ca5-ad91-5998c4780ba0"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:34:32 crc kubenswrapper[5070]: I1213 03:34:32.022251 5070 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/72639ee2-e2ea-4ca5-ad91-5998c4780ba0-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 13 03:34:32 crc kubenswrapper[5070]: I1213 03:34:32.022457 5070 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/72639ee2-e2ea-4ca5-ad91-5998c4780ba0-inventory\") on node \"crc\" DevicePath \"\"" Dec 13 03:34:32 crc kubenswrapper[5070]: I1213 03:34:32.022535 5070 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/72639ee2-e2ea-4ca5-ad91-5998c4780ba0-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 03:34:32 crc kubenswrapper[5070]: I1213 03:34:32.022611 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v576v\" (UniqueName: \"kubernetes.io/projected/72639ee2-e2ea-4ca5-ad91-5998c4780ba0-kube-api-access-v576v\") on node \"crc\" DevicePath \"\"" Dec 13 03:34:32 crc kubenswrapper[5070]: I1213 03:34:32.407787 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-96kcn" event={"ID":"72639ee2-e2ea-4ca5-ad91-5998c4780ba0","Type":"ContainerDied","Data":"d16e223823ec2036c219c66a06d3ec8c62318d365b736502e76ef34061870ee1"} Dec 13 03:34:32 crc kubenswrapper[5070]: I1213 03:34:32.407831 5070 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d16e223823ec2036c219c66a06d3ec8c62318d365b736502e76ef34061870ee1" Dec 13 03:34:32 crc kubenswrapper[5070]: I1213 03:34:32.407882 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-96kcn" Dec 13 03:34:32 crc kubenswrapper[5070]: I1213 03:34:32.491392 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-p99j4"] Dec 13 03:34:32 crc kubenswrapper[5070]: E1213 03:34:32.492117 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="72639ee2-e2ea-4ca5-ad91-5998c4780ba0" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Dec 13 03:34:32 crc kubenswrapper[5070]: I1213 03:34:32.492150 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="72639ee2-e2ea-4ca5-ad91-5998c4780ba0" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Dec 13 03:34:32 crc kubenswrapper[5070]: I1213 03:34:32.492437 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="72639ee2-e2ea-4ca5-ad91-5998c4780ba0" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Dec 13 03:34:32 crc kubenswrapper[5070]: I1213 03:34:32.493261 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-p99j4" Dec 13 03:34:32 crc kubenswrapper[5070]: I1213 03:34:32.496180 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-gdkcc" Dec 13 03:34:32 crc kubenswrapper[5070]: I1213 03:34:32.496233 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 13 03:34:32 crc kubenswrapper[5070]: I1213 03:34:32.496180 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 13 03:34:32 crc kubenswrapper[5070]: I1213 03:34:32.496597 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 13 03:34:32 crc kubenswrapper[5070]: I1213 03:34:32.501634 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-p99j4"] Dec 13 03:34:32 crc kubenswrapper[5070]: I1213 03:34:32.635630 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/201e6f0e-784e-4526-bcbd-3c6ca1d592b6-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-p99j4\" (UID: \"201e6f0e-784e-4526-bcbd-3c6ca1d592b6\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-p99j4" Dec 13 03:34:32 crc kubenswrapper[5070]: I1213 03:34:32.635676 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/201e6f0e-784e-4526-bcbd-3c6ca1d592b6-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-p99j4\" (UID: \"201e6f0e-784e-4526-bcbd-3c6ca1d592b6\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-p99j4" Dec 13 03:34:32 crc kubenswrapper[5070]: I1213 03:34:32.635718 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9tj5q\" (UniqueName: \"kubernetes.io/projected/201e6f0e-784e-4526-bcbd-3c6ca1d592b6-kube-api-access-9tj5q\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-p99j4\" (UID: \"201e6f0e-784e-4526-bcbd-3c6ca1d592b6\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-p99j4" Dec 13 03:34:32 crc kubenswrapper[5070]: I1213 03:34:32.635746 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/201e6f0e-784e-4526-bcbd-3c6ca1d592b6-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-p99j4\" (UID: \"201e6f0e-784e-4526-bcbd-3c6ca1d592b6\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-p99j4" Dec 13 03:34:32 crc kubenswrapper[5070]: I1213 03:34:32.737211 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/201e6f0e-784e-4526-bcbd-3c6ca1d592b6-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-p99j4\" (UID: \"201e6f0e-784e-4526-bcbd-3c6ca1d592b6\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-p99j4" Dec 13 03:34:32 crc kubenswrapper[5070]: I1213 03:34:32.737269 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/201e6f0e-784e-4526-bcbd-3c6ca1d592b6-inventory\") pod 
\"bootstrap-edpm-deployment-openstack-edpm-ipam-p99j4\" (UID: \"201e6f0e-784e-4526-bcbd-3c6ca1d592b6\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-p99j4" Dec 13 03:34:32 crc kubenswrapper[5070]: I1213 03:34:32.737321 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9tj5q\" (UniqueName: \"kubernetes.io/projected/201e6f0e-784e-4526-bcbd-3c6ca1d592b6-kube-api-access-9tj5q\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-p99j4\" (UID: \"201e6f0e-784e-4526-bcbd-3c6ca1d592b6\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-p99j4" Dec 13 03:34:32 crc kubenswrapper[5070]: I1213 03:34:32.737354 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/201e6f0e-784e-4526-bcbd-3c6ca1d592b6-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-p99j4\" (UID: \"201e6f0e-784e-4526-bcbd-3c6ca1d592b6\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-p99j4" Dec 13 03:34:32 crc kubenswrapper[5070]: I1213 03:34:32.741198 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/201e6f0e-784e-4526-bcbd-3c6ca1d592b6-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-p99j4\" (UID: \"201e6f0e-784e-4526-bcbd-3c6ca1d592b6\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-p99j4" Dec 13 03:34:32 crc kubenswrapper[5070]: I1213 03:34:32.741762 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/201e6f0e-784e-4526-bcbd-3c6ca1d592b6-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-p99j4\" (UID: \"201e6f0e-784e-4526-bcbd-3c6ca1d592b6\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-p99j4" Dec 13 03:34:32 crc kubenswrapper[5070]: I1213 03:34:32.741966 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/201e6f0e-784e-4526-bcbd-3c6ca1d592b6-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-p99j4\" (UID: \"201e6f0e-784e-4526-bcbd-3c6ca1d592b6\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-p99j4" Dec 13 03:34:32 crc kubenswrapper[5070]: I1213 03:34:32.760895 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9tj5q\" (UniqueName: \"kubernetes.io/projected/201e6f0e-784e-4526-bcbd-3c6ca1d592b6-kube-api-access-9tj5q\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-p99j4\" (UID: \"201e6f0e-784e-4526-bcbd-3c6ca1d592b6\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-p99j4" Dec 13 03:34:32 crc kubenswrapper[5070]: I1213 03:34:32.821025 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-p99j4" Dec 13 03:34:33 crc kubenswrapper[5070]: I1213 03:34:33.391403 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-p99j4"] Dec 13 03:34:33 crc kubenswrapper[5070]: I1213 03:34:33.419391 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-p99j4" event={"ID":"201e6f0e-784e-4526-bcbd-3c6ca1d592b6","Type":"ContainerStarted","Data":"49e183d5c5232dbf790ef251f96a8e35a82568fe0e1710a952dc29fbf6ecc749"} Dec 13 03:34:35 crc kubenswrapper[5070]: I1213 03:34:35.449052 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-p99j4" event={"ID":"201e6f0e-784e-4526-bcbd-3c6ca1d592b6","Type":"ContainerStarted","Data":"e24ab748bc62e54fc73dfdb116c3786c37bce75dab0cd26250000d68d0ade04e"} Dec 13 03:34:35 crc kubenswrapper[5070]: I1213 03:34:35.471510 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-p99j4" podStartSLOduration=2.663972944 podStartE2EDuration="3.471416043s" podCreationTimestamp="2025-12-13 03:34:32 +0000 UTC" firstStartedPulling="2025-12-13 03:34:33.391991064 +0000 UTC m=+1365.627834650" lastFinishedPulling="2025-12-13 03:34:34.199434163 +0000 UTC m=+1366.435277749" observedRunningTime="2025-12-13 03:34:35.465936353 +0000 UTC m=+1367.701779899" watchObservedRunningTime="2025-12-13 03:34:35.471416043 +0000 UTC m=+1367.707259629" Dec 13 03:35:23 crc kubenswrapper[5070]: I1213 03:35:23.503457 5070 scope.go:117] "RemoveContainer" containerID="e6450eacfbe9fc1a61285d8cfc0d7bf6cf4651537676991d6c2c308e159a158a" Dec 13 03:35:23 crc kubenswrapper[5070]: I1213 03:35:23.537565 5070 scope.go:117] "RemoveContainer" containerID="33003ab2fb9eb934abf617d765789be5f82404c9f0f62b664f7771d0588edc3c" Dec 13 03:35:23 crc kubenswrapper[5070]: I1213 03:35:23.569492 5070 scope.go:117] "RemoveContainer" containerID="6c4ef47d6ecc0948e5e89e4b0a9e280fb1e18267e47c12f63307114b9cf1d371" Dec 13 03:35:51 crc kubenswrapper[5070]: I1213 03:35:51.943831 5070 patch_prober.go:28] interesting pod/machine-config-daemon-9l4rb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 13 03:35:51 crc kubenswrapper[5070]: I1213 03:35:51.944608 5070 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 13 03:36:21 crc kubenswrapper[5070]: I1213 03:36:21.943208 5070 patch_prober.go:28] interesting pod/machine-config-daemon-9l4rb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 13 03:36:21 crc kubenswrapper[5070]: I1213 03:36:21.943906 5070 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" containerName="machine-config-daemon" probeResult="failure" output="Get 
\"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 13 03:36:23 crc kubenswrapper[5070]: I1213 03:36:23.647011 5070 scope.go:117] "RemoveContainer" containerID="c97dc7d97309ad86b2bb1f11e2b677901fdcb60378be5f7fc0a07467eee18526" Dec 13 03:36:51 crc kubenswrapper[5070]: I1213 03:36:51.943020 5070 patch_prober.go:28] interesting pod/machine-config-daemon-9l4rb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 13 03:36:51 crc kubenswrapper[5070]: I1213 03:36:51.943658 5070 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 13 03:36:51 crc kubenswrapper[5070]: I1213 03:36:51.943710 5070 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" Dec 13 03:36:51 crc kubenswrapper[5070]: I1213 03:36:51.944646 5070 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"cca15ae9a649254597cf251cc63ec7ab70879a87c44cea845775d4020af3f28d"} pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 13 03:36:51 crc kubenswrapper[5070]: I1213 03:36:51.944707 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" containerName="machine-config-daemon" containerID="cri-o://cca15ae9a649254597cf251cc63ec7ab70879a87c44cea845775d4020af3f28d" gracePeriod=600 Dec 13 03:36:52 crc kubenswrapper[5070]: I1213 03:36:52.790138 5070 generic.go:334] "Generic (PLEG): container finished" podID="a2e447c7-5901-414f-af96-69441d4750db" containerID="cca15ae9a649254597cf251cc63ec7ab70879a87c44cea845775d4020af3f28d" exitCode=0 Dec 13 03:36:52 crc kubenswrapper[5070]: I1213 03:36:52.790182 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" event={"ID":"a2e447c7-5901-414f-af96-69441d4750db","Type":"ContainerDied","Data":"cca15ae9a649254597cf251cc63ec7ab70879a87c44cea845775d4020af3f28d"} Dec 13 03:36:52 crc kubenswrapper[5070]: I1213 03:36:52.791171 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" event={"ID":"a2e447c7-5901-414f-af96-69441d4750db","Type":"ContainerStarted","Data":"868a59edbd4e377cf977ca832f6b9371f5846d986a6bbcacfbabf30c647e78a8"} Dec 13 03:36:52 crc kubenswrapper[5070]: I1213 03:36:52.791285 5070 scope.go:117] "RemoveContainer" containerID="0e606491cad6e3bd7c4c4b366efe48c2688d128966fcc1905da9fc58a2f148b4" Dec 13 03:37:55 crc kubenswrapper[5070]: I1213 03:37:55.421679 5070 generic.go:334] "Generic (PLEG): container finished" podID="201e6f0e-784e-4526-bcbd-3c6ca1d592b6" containerID="e24ab748bc62e54fc73dfdb116c3786c37bce75dab0cd26250000d68d0ade04e" exitCode=0 Dec 13 03:37:55 crc kubenswrapper[5070]: I1213 03:37:55.421830 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-p99j4" event={"ID":"201e6f0e-784e-4526-bcbd-3c6ca1d592b6","Type":"ContainerDied","Data":"e24ab748bc62e54fc73dfdb116c3786c37bce75dab0cd26250000d68d0ade04e"} Dec 13 03:37:56 crc kubenswrapper[5070]: I1213 03:37:56.877921 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-p99j4" Dec 13 03:37:57 crc kubenswrapper[5070]: I1213 03:37:57.051569 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/201e6f0e-784e-4526-bcbd-3c6ca1d592b6-bootstrap-combined-ca-bundle\") pod \"201e6f0e-784e-4526-bcbd-3c6ca1d592b6\" (UID: \"201e6f0e-784e-4526-bcbd-3c6ca1d592b6\") " Dec 13 03:37:57 crc kubenswrapper[5070]: I1213 03:37:57.051735 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/201e6f0e-784e-4526-bcbd-3c6ca1d592b6-inventory\") pod \"201e6f0e-784e-4526-bcbd-3c6ca1d592b6\" (UID: \"201e6f0e-784e-4526-bcbd-3c6ca1d592b6\") " Dec 13 03:37:57 crc kubenswrapper[5070]: I1213 03:37:57.051801 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9tj5q\" (UniqueName: \"kubernetes.io/projected/201e6f0e-784e-4526-bcbd-3c6ca1d592b6-kube-api-access-9tj5q\") pod \"201e6f0e-784e-4526-bcbd-3c6ca1d592b6\" (UID: \"201e6f0e-784e-4526-bcbd-3c6ca1d592b6\") " Dec 13 03:37:57 crc kubenswrapper[5070]: I1213 03:37:57.051911 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/201e6f0e-784e-4526-bcbd-3c6ca1d592b6-ssh-key\") pod \"201e6f0e-784e-4526-bcbd-3c6ca1d592b6\" (UID: \"201e6f0e-784e-4526-bcbd-3c6ca1d592b6\") " Dec 13 03:37:57 crc kubenswrapper[5070]: I1213 03:37:57.058006 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/201e6f0e-784e-4526-bcbd-3c6ca1d592b6-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "201e6f0e-784e-4526-bcbd-3c6ca1d592b6" (UID: "201e6f0e-784e-4526-bcbd-3c6ca1d592b6"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:37:57 crc kubenswrapper[5070]: I1213 03:37:57.058008 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/201e6f0e-784e-4526-bcbd-3c6ca1d592b6-kube-api-access-9tj5q" (OuterVolumeSpecName: "kube-api-access-9tj5q") pod "201e6f0e-784e-4526-bcbd-3c6ca1d592b6" (UID: "201e6f0e-784e-4526-bcbd-3c6ca1d592b6"). InnerVolumeSpecName "kube-api-access-9tj5q". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:37:57 crc kubenswrapper[5070]: I1213 03:37:57.092627 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/201e6f0e-784e-4526-bcbd-3c6ca1d592b6-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "201e6f0e-784e-4526-bcbd-3c6ca1d592b6" (UID: "201e6f0e-784e-4526-bcbd-3c6ca1d592b6"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:37:57 crc kubenswrapper[5070]: I1213 03:37:57.101588 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/201e6f0e-784e-4526-bcbd-3c6ca1d592b6-inventory" (OuterVolumeSpecName: "inventory") pod "201e6f0e-784e-4526-bcbd-3c6ca1d592b6" (UID: "201e6f0e-784e-4526-bcbd-3c6ca1d592b6"). 
InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:37:57 crc kubenswrapper[5070]: I1213 03:37:57.156431 5070 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/201e6f0e-784e-4526-bcbd-3c6ca1d592b6-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 03:37:57 crc kubenswrapper[5070]: I1213 03:37:57.156587 5070 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/201e6f0e-784e-4526-bcbd-3c6ca1d592b6-inventory\") on node \"crc\" DevicePath \"\"" Dec 13 03:37:57 crc kubenswrapper[5070]: I1213 03:37:57.156606 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9tj5q\" (UniqueName: \"kubernetes.io/projected/201e6f0e-784e-4526-bcbd-3c6ca1d592b6-kube-api-access-9tj5q\") on node \"crc\" DevicePath \"\"" Dec 13 03:37:57 crc kubenswrapper[5070]: I1213 03:37:57.156626 5070 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/201e6f0e-784e-4526-bcbd-3c6ca1d592b6-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 13 03:37:57 crc kubenswrapper[5070]: I1213 03:37:57.445617 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-p99j4" event={"ID":"201e6f0e-784e-4526-bcbd-3c6ca1d592b6","Type":"ContainerDied","Data":"49e183d5c5232dbf790ef251f96a8e35a82568fe0e1710a952dc29fbf6ecc749"} Dec 13 03:37:57 crc kubenswrapper[5070]: I1213 03:37:57.445673 5070 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="49e183d5c5232dbf790ef251f96a8e35a82568fe0e1710a952dc29fbf6ecc749" Dec 13 03:37:57 crc kubenswrapper[5070]: I1213 03:37:57.445687 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-p99j4" Dec 13 03:37:57 crc kubenswrapper[5070]: I1213 03:37:57.544429 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-gwf6t"] Dec 13 03:37:57 crc kubenswrapper[5070]: E1213 03:37:57.544938 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="201e6f0e-784e-4526-bcbd-3c6ca1d592b6" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Dec 13 03:37:57 crc kubenswrapper[5070]: I1213 03:37:57.544962 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="201e6f0e-784e-4526-bcbd-3c6ca1d592b6" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Dec 13 03:37:57 crc kubenswrapper[5070]: I1213 03:37:57.545219 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="201e6f0e-784e-4526-bcbd-3c6ca1d592b6" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Dec 13 03:37:57 crc kubenswrapper[5070]: I1213 03:37:57.554740 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-gwf6t" Dec 13 03:37:57 crc kubenswrapper[5070]: I1213 03:37:57.567381 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 13 03:37:57 crc kubenswrapper[5070]: I1213 03:37:57.568956 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 13 03:37:57 crc kubenswrapper[5070]: I1213 03:37:57.569139 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 13 03:37:57 crc kubenswrapper[5070]: I1213 03:37:57.569354 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-gdkcc" Dec 13 03:37:57 crc kubenswrapper[5070]: I1213 03:37:57.573065 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-gwf6t"] Dec 13 03:37:57 crc kubenswrapper[5070]: I1213 03:37:57.679687 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/639f210d-0488-4906-9a1e-00fdc9e1c77f-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-gwf6t\" (UID: \"639f210d-0488-4906-9a1e-00fdc9e1c77f\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-gwf6t" Dec 13 03:37:57 crc kubenswrapper[5070]: I1213 03:37:57.679756 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2skl5\" (UniqueName: \"kubernetes.io/projected/639f210d-0488-4906-9a1e-00fdc9e1c77f-kube-api-access-2skl5\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-gwf6t\" (UID: \"639f210d-0488-4906-9a1e-00fdc9e1c77f\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-gwf6t" Dec 13 03:37:57 crc kubenswrapper[5070]: I1213 03:37:57.679862 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/639f210d-0488-4906-9a1e-00fdc9e1c77f-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-gwf6t\" (UID: \"639f210d-0488-4906-9a1e-00fdc9e1c77f\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-gwf6t" Dec 13 03:37:57 crc kubenswrapper[5070]: I1213 03:37:57.781637 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/639f210d-0488-4906-9a1e-00fdc9e1c77f-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-gwf6t\" (UID: \"639f210d-0488-4906-9a1e-00fdc9e1c77f\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-gwf6t" Dec 13 03:37:57 crc kubenswrapper[5070]: I1213 03:37:57.781815 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/639f210d-0488-4906-9a1e-00fdc9e1c77f-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-gwf6t\" (UID: \"639f210d-0488-4906-9a1e-00fdc9e1c77f\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-gwf6t" Dec 13 03:37:57 crc kubenswrapper[5070]: I1213 03:37:57.781854 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2skl5\" (UniqueName: \"kubernetes.io/projected/639f210d-0488-4906-9a1e-00fdc9e1c77f-kube-api-access-2skl5\") 
pod \"configure-network-edpm-deployment-openstack-edpm-ipam-gwf6t\" (UID: \"639f210d-0488-4906-9a1e-00fdc9e1c77f\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-gwf6t" Dec 13 03:37:57 crc kubenswrapper[5070]: I1213 03:37:57.787044 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/639f210d-0488-4906-9a1e-00fdc9e1c77f-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-gwf6t\" (UID: \"639f210d-0488-4906-9a1e-00fdc9e1c77f\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-gwf6t" Dec 13 03:37:57 crc kubenswrapper[5070]: I1213 03:37:57.787044 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/639f210d-0488-4906-9a1e-00fdc9e1c77f-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-gwf6t\" (UID: \"639f210d-0488-4906-9a1e-00fdc9e1c77f\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-gwf6t" Dec 13 03:37:57 crc kubenswrapper[5070]: I1213 03:37:57.816292 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2skl5\" (UniqueName: \"kubernetes.io/projected/639f210d-0488-4906-9a1e-00fdc9e1c77f-kube-api-access-2skl5\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-gwf6t\" (UID: \"639f210d-0488-4906-9a1e-00fdc9e1c77f\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-gwf6t" Dec 13 03:37:57 crc kubenswrapper[5070]: I1213 03:37:57.889088 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-gwf6t" Dec 13 03:37:58 crc kubenswrapper[5070]: I1213 03:37:58.467011 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-gwf6t"] Dec 13 03:37:58 crc kubenswrapper[5070]: I1213 03:37:58.469148 5070 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 13 03:37:59 crc kubenswrapper[5070]: I1213 03:37:59.475932 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-gwf6t" event={"ID":"639f210d-0488-4906-9a1e-00fdc9e1c77f","Type":"ContainerStarted","Data":"5694659e768a246534cde48e4ca07e0d6789d5e57afe731f08bb3c6ccca9b022"} Dec 13 03:38:00 crc kubenswrapper[5070]: I1213 03:38:00.488663 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-gwf6t" event={"ID":"639f210d-0488-4906-9a1e-00fdc9e1c77f","Type":"ContainerStarted","Data":"add073fb630bf030820b5fd5f2a9f8959cd8acf33a1076c1ee38e8884709b36f"} Dec 13 03:38:00 crc kubenswrapper[5070]: I1213 03:38:00.512129 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-gwf6t" podStartSLOduration=2.455160302 podStartE2EDuration="3.512105515s" podCreationTimestamp="2025-12-13 03:37:57 +0000 UTC" firstStartedPulling="2025-12-13 03:37:58.468824948 +0000 UTC m=+1570.704668504" lastFinishedPulling="2025-12-13 03:37:59.525770171 +0000 UTC m=+1571.761613717" observedRunningTime="2025-12-13 03:38:00.504844087 +0000 UTC m=+1572.740687633" watchObservedRunningTime="2025-12-13 03:38:00.512105515 +0000 UTC m=+1572.747949061" Dec 13 03:38:39 crc kubenswrapper[5070]: I1213 03:38:39.045626 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack/glance-db-create-hsxvf"] Dec 13 03:38:39 crc kubenswrapper[5070]: I1213 03:38:39.057682 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-create-hsxvf"] Dec 13 03:38:40 crc kubenswrapper[5070]: I1213 03:38:40.180276 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cae1b496-66ab-43e4-9a6a-95cb71b5fd49" path="/var/lib/kubelet/pods/cae1b496-66ab-43e4-9a6a-95cb71b5fd49/volumes" Dec 13 03:38:43 crc kubenswrapper[5070]: I1213 03:38:43.056695 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-create-czmmk"] Dec 13 03:38:43 crc kubenswrapper[5070]: I1213 03:38:43.067412 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-create-9shm5"] Dec 13 03:38:43 crc kubenswrapper[5070]: I1213 03:38:43.077524 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-create-czmmk"] Dec 13 03:38:43 crc kubenswrapper[5070]: I1213 03:38:43.087089 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-create-9shm5"] Dec 13 03:38:44 crc kubenswrapper[5070]: I1213 03:38:44.181333 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8e0d7597-7e74-4b63-a689-d545690e3715" path="/var/lib/kubelet/pods/8e0d7597-7e74-4b63-a689-d545690e3715/volumes" Dec 13 03:38:44 crc kubenswrapper[5070]: I1213 03:38:44.183046 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9352dffa-7117-42a7-b012-a78eee3dae3a" path="/var/lib/kubelet/pods/9352dffa-7117-42a7-b012-a78eee3dae3a/volumes" Dec 13 03:38:49 crc kubenswrapper[5070]: I1213 03:38:49.036070 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-9dbc-account-create-9dwbb"] Dec 13 03:38:49 crc kubenswrapper[5070]: I1213 03:38:49.047746 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-9dbc-account-create-9dwbb"] Dec 13 03:38:50 crc kubenswrapper[5070]: I1213 03:38:50.176908 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d51c8da0-0e5a-42d2-9a28-4e4ba8c534d6" path="/var/lib/kubelet/pods/d51c8da0-0e5a-42d2-9a28-4e4ba8c534d6/volumes" Dec 13 03:38:54 crc kubenswrapper[5070]: I1213 03:38:54.041987 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-70a4-account-create-xgzss"] Dec 13 03:38:54 crc kubenswrapper[5070]: I1213 03:38:54.051150 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-54d5-account-create-nh9pv"] Dec 13 03:38:54 crc kubenswrapper[5070]: I1213 03:38:54.059102 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-70a4-account-create-xgzss"] Dec 13 03:38:54 crc kubenswrapper[5070]: I1213 03:38:54.066621 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-54d5-account-create-nh9pv"] Dec 13 03:38:54 crc kubenswrapper[5070]: I1213 03:38:54.242804 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="578e1316-3a47-4516-89e8-59f936a22495" path="/var/lib/kubelet/pods/578e1316-3a47-4516-89e8-59f936a22495/volumes" Dec 13 03:38:54 crc kubenswrapper[5070]: I1213 03:38:54.243772 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd03ee19-a6d8-4999-8b43-d31b4d159a8d" path="/var/lib/kubelet/pods/cd03ee19-a6d8-4999-8b43-d31b4d159a8d/volumes" Dec 13 03:39:13 crc kubenswrapper[5070]: I1213 03:39:13.156427 5070 generic.go:334] "Generic (PLEG): container finished" podID="639f210d-0488-4906-9a1e-00fdc9e1c77f" 
containerID="add073fb630bf030820b5fd5f2a9f8959cd8acf33a1076c1ee38e8884709b36f" exitCode=0 Dec 13 03:39:13 crc kubenswrapper[5070]: I1213 03:39:13.156531 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-gwf6t" event={"ID":"639f210d-0488-4906-9a1e-00fdc9e1c77f","Type":"ContainerDied","Data":"add073fb630bf030820b5fd5f2a9f8959cd8acf33a1076c1ee38e8884709b36f"} Dec 13 03:39:14 crc kubenswrapper[5070]: I1213 03:39:14.622697 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-gwf6t" Dec 13 03:39:14 crc kubenswrapper[5070]: I1213 03:39:14.706488 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/639f210d-0488-4906-9a1e-00fdc9e1c77f-inventory\") pod \"639f210d-0488-4906-9a1e-00fdc9e1c77f\" (UID: \"639f210d-0488-4906-9a1e-00fdc9e1c77f\") " Dec 13 03:39:14 crc kubenswrapper[5070]: I1213 03:39:14.706597 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/639f210d-0488-4906-9a1e-00fdc9e1c77f-ssh-key\") pod \"639f210d-0488-4906-9a1e-00fdc9e1c77f\" (UID: \"639f210d-0488-4906-9a1e-00fdc9e1c77f\") " Dec 13 03:39:14 crc kubenswrapper[5070]: I1213 03:39:14.706673 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2skl5\" (UniqueName: \"kubernetes.io/projected/639f210d-0488-4906-9a1e-00fdc9e1c77f-kube-api-access-2skl5\") pod \"639f210d-0488-4906-9a1e-00fdc9e1c77f\" (UID: \"639f210d-0488-4906-9a1e-00fdc9e1c77f\") " Dec 13 03:39:14 crc kubenswrapper[5070]: I1213 03:39:14.713160 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/639f210d-0488-4906-9a1e-00fdc9e1c77f-kube-api-access-2skl5" (OuterVolumeSpecName: "kube-api-access-2skl5") pod "639f210d-0488-4906-9a1e-00fdc9e1c77f" (UID: "639f210d-0488-4906-9a1e-00fdc9e1c77f"). InnerVolumeSpecName "kube-api-access-2skl5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:39:14 crc kubenswrapper[5070]: I1213 03:39:14.734998 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/639f210d-0488-4906-9a1e-00fdc9e1c77f-inventory" (OuterVolumeSpecName: "inventory") pod "639f210d-0488-4906-9a1e-00fdc9e1c77f" (UID: "639f210d-0488-4906-9a1e-00fdc9e1c77f"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:39:14 crc kubenswrapper[5070]: I1213 03:39:14.737855 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/639f210d-0488-4906-9a1e-00fdc9e1c77f-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "639f210d-0488-4906-9a1e-00fdc9e1c77f" (UID: "639f210d-0488-4906-9a1e-00fdc9e1c77f"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:39:14 crc kubenswrapper[5070]: I1213 03:39:14.809410 5070 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/639f210d-0488-4906-9a1e-00fdc9e1c77f-inventory\") on node \"crc\" DevicePath \"\"" Dec 13 03:39:14 crc kubenswrapper[5070]: I1213 03:39:14.809468 5070 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/639f210d-0488-4906-9a1e-00fdc9e1c77f-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 13 03:39:14 crc kubenswrapper[5070]: I1213 03:39:14.809479 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2skl5\" (UniqueName: \"kubernetes.io/projected/639f210d-0488-4906-9a1e-00fdc9e1c77f-kube-api-access-2skl5\") on node \"crc\" DevicePath \"\"" Dec 13 03:39:15 crc kubenswrapper[5070]: I1213 03:39:15.179829 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-gwf6t" event={"ID":"639f210d-0488-4906-9a1e-00fdc9e1c77f","Type":"ContainerDied","Data":"5694659e768a246534cde48e4ca07e0d6789d5e57afe731f08bb3c6ccca9b022"} Dec 13 03:39:15 crc kubenswrapper[5070]: I1213 03:39:15.179894 5070 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5694659e768a246534cde48e4ca07e0d6789d5e57afe731f08bb3c6ccca9b022" Dec 13 03:39:15 crc kubenswrapper[5070]: I1213 03:39:15.180009 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-gwf6t" Dec 13 03:39:15 crc kubenswrapper[5070]: I1213 03:39:15.270707 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-2g9ds"] Dec 13 03:39:15 crc kubenswrapper[5070]: E1213 03:39:15.271131 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="639f210d-0488-4906-9a1e-00fdc9e1c77f" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Dec 13 03:39:15 crc kubenswrapper[5070]: I1213 03:39:15.271151 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="639f210d-0488-4906-9a1e-00fdc9e1c77f" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Dec 13 03:39:15 crc kubenswrapper[5070]: I1213 03:39:15.271514 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="639f210d-0488-4906-9a1e-00fdc9e1c77f" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Dec 13 03:39:15 crc kubenswrapper[5070]: I1213 03:39:15.272382 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-2g9ds" Dec 13 03:39:15 crc kubenswrapper[5070]: I1213 03:39:15.280369 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 13 03:39:15 crc kubenswrapper[5070]: I1213 03:39:15.280461 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-gdkcc" Dec 13 03:39:15 crc kubenswrapper[5070]: I1213 03:39:15.280685 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 13 03:39:15 crc kubenswrapper[5070]: I1213 03:39:15.280873 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 13 03:39:15 crc kubenswrapper[5070]: I1213 03:39:15.293305 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-2g9ds"] Dec 13 03:39:15 crc kubenswrapper[5070]: I1213 03:39:15.318180 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a500beac-00af-444d-8ca8-52c3c7c0af60-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-2g9ds\" (UID: \"a500beac-00af-444d-8ca8-52c3c7c0af60\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-2g9ds" Dec 13 03:39:15 crc kubenswrapper[5070]: I1213 03:39:15.318299 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a500beac-00af-444d-8ca8-52c3c7c0af60-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-2g9ds\" (UID: \"a500beac-00af-444d-8ca8-52c3c7c0af60\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-2g9ds" Dec 13 03:39:15 crc kubenswrapper[5070]: I1213 03:39:15.318365 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wh9rn\" (UniqueName: \"kubernetes.io/projected/a500beac-00af-444d-8ca8-52c3c7c0af60-kube-api-access-wh9rn\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-2g9ds\" (UID: \"a500beac-00af-444d-8ca8-52c3c7c0af60\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-2g9ds" Dec 13 03:39:15 crc kubenswrapper[5070]: I1213 03:39:15.419783 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a500beac-00af-444d-8ca8-52c3c7c0af60-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-2g9ds\" (UID: \"a500beac-00af-444d-8ca8-52c3c7c0af60\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-2g9ds" Dec 13 03:39:15 crc kubenswrapper[5070]: I1213 03:39:15.419878 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a500beac-00af-444d-8ca8-52c3c7c0af60-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-2g9ds\" (UID: \"a500beac-00af-444d-8ca8-52c3c7c0af60\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-2g9ds" Dec 13 03:39:15 crc kubenswrapper[5070]: I1213 03:39:15.419943 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wh9rn\" (UniqueName: \"kubernetes.io/projected/a500beac-00af-444d-8ca8-52c3c7c0af60-kube-api-access-wh9rn\") pod 
\"validate-network-edpm-deployment-openstack-edpm-ipam-2g9ds\" (UID: \"a500beac-00af-444d-8ca8-52c3c7c0af60\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-2g9ds" Dec 13 03:39:15 crc kubenswrapper[5070]: I1213 03:39:15.424477 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a500beac-00af-444d-8ca8-52c3c7c0af60-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-2g9ds\" (UID: \"a500beac-00af-444d-8ca8-52c3c7c0af60\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-2g9ds" Dec 13 03:39:15 crc kubenswrapper[5070]: I1213 03:39:15.424570 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a500beac-00af-444d-8ca8-52c3c7c0af60-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-2g9ds\" (UID: \"a500beac-00af-444d-8ca8-52c3c7c0af60\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-2g9ds" Dec 13 03:39:15 crc kubenswrapper[5070]: I1213 03:39:15.441533 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wh9rn\" (UniqueName: \"kubernetes.io/projected/a500beac-00af-444d-8ca8-52c3c7c0af60-kube-api-access-wh9rn\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-2g9ds\" (UID: \"a500beac-00af-444d-8ca8-52c3c7c0af60\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-2g9ds" Dec 13 03:39:15 crc kubenswrapper[5070]: I1213 03:39:15.593686 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-2g9ds" Dec 13 03:39:15 crc kubenswrapper[5070]: I1213 03:39:15.946158 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-2g9ds"] Dec 13 03:39:16 crc kubenswrapper[5070]: I1213 03:39:16.189271 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-2g9ds" event={"ID":"a500beac-00af-444d-8ca8-52c3c7c0af60","Type":"ContainerStarted","Data":"77379b50722d00f98b13c0c16ca84b52349b656a35e155c17b5fdc30dec5bcb8"} Dec 13 03:39:17 crc kubenswrapper[5070]: I1213 03:39:17.200268 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-2g9ds" event={"ID":"a500beac-00af-444d-8ca8-52c3c7c0af60","Type":"ContainerStarted","Data":"6edad932fa3ff3c2ef609119b2b212d2ba7afbd2ab1fbb62dd76f3c965d2e43b"} Dec 13 03:39:17 crc kubenswrapper[5070]: I1213 03:39:17.225743 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-2g9ds" podStartSLOduration=1.761013316 podStartE2EDuration="2.225719627s" podCreationTimestamp="2025-12-13 03:39:15 +0000 UTC" firstStartedPulling="2025-12-13 03:39:15.941539758 +0000 UTC m=+1648.177383304" lastFinishedPulling="2025-12-13 03:39:16.406246059 +0000 UTC m=+1648.642089615" observedRunningTime="2025-12-13 03:39:17.219762444 +0000 UTC m=+1649.455606010" watchObservedRunningTime="2025-12-13 03:39:17.225719627 +0000 UTC m=+1649.461563223" Dec 13 03:39:21 crc kubenswrapper[5070]: I1213 03:39:21.231109 5070 generic.go:334] "Generic (PLEG): container finished" podID="a500beac-00af-444d-8ca8-52c3c7c0af60" containerID="6edad932fa3ff3c2ef609119b2b212d2ba7afbd2ab1fbb62dd76f3c965d2e43b" exitCode=0 Dec 13 03:39:21 crc kubenswrapper[5070]: I1213 
03:39:21.231192 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-2g9ds" event={"ID":"a500beac-00af-444d-8ca8-52c3c7c0af60","Type":"ContainerDied","Data":"6edad932fa3ff3c2ef609119b2b212d2ba7afbd2ab1fbb62dd76f3c965d2e43b"} Dec 13 03:39:21 crc kubenswrapper[5070]: I1213 03:39:21.942732 5070 patch_prober.go:28] interesting pod/machine-config-daemon-9l4rb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 13 03:39:21 crc kubenswrapper[5070]: I1213 03:39:21.942822 5070 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 13 03:39:22 crc kubenswrapper[5070]: I1213 03:39:22.645750 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-2g9ds" Dec 13 03:39:22 crc kubenswrapper[5070]: I1213 03:39:22.754592 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wh9rn\" (UniqueName: \"kubernetes.io/projected/a500beac-00af-444d-8ca8-52c3c7c0af60-kube-api-access-wh9rn\") pod \"a500beac-00af-444d-8ca8-52c3c7c0af60\" (UID: \"a500beac-00af-444d-8ca8-52c3c7c0af60\") " Dec 13 03:39:22 crc kubenswrapper[5070]: I1213 03:39:22.754820 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a500beac-00af-444d-8ca8-52c3c7c0af60-ssh-key\") pod \"a500beac-00af-444d-8ca8-52c3c7c0af60\" (UID: \"a500beac-00af-444d-8ca8-52c3c7c0af60\") " Dec 13 03:39:22 crc kubenswrapper[5070]: I1213 03:39:22.754856 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a500beac-00af-444d-8ca8-52c3c7c0af60-inventory\") pod \"a500beac-00af-444d-8ca8-52c3c7c0af60\" (UID: \"a500beac-00af-444d-8ca8-52c3c7c0af60\") " Dec 13 03:39:22 crc kubenswrapper[5070]: I1213 03:39:22.761746 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a500beac-00af-444d-8ca8-52c3c7c0af60-kube-api-access-wh9rn" (OuterVolumeSpecName: "kube-api-access-wh9rn") pod "a500beac-00af-444d-8ca8-52c3c7c0af60" (UID: "a500beac-00af-444d-8ca8-52c3c7c0af60"). InnerVolumeSpecName "kube-api-access-wh9rn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:39:22 crc kubenswrapper[5070]: I1213 03:39:22.788478 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a500beac-00af-444d-8ca8-52c3c7c0af60-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "a500beac-00af-444d-8ca8-52c3c7c0af60" (UID: "a500beac-00af-444d-8ca8-52c3c7c0af60"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:39:22 crc kubenswrapper[5070]: I1213 03:39:22.789922 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a500beac-00af-444d-8ca8-52c3c7c0af60-inventory" (OuterVolumeSpecName: "inventory") pod "a500beac-00af-444d-8ca8-52c3c7c0af60" (UID: "a500beac-00af-444d-8ca8-52c3c7c0af60"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:39:22 crc kubenswrapper[5070]: I1213 03:39:22.857472 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wh9rn\" (UniqueName: \"kubernetes.io/projected/a500beac-00af-444d-8ca8-52c3c7c0af60-kube-api-access-wh9rn\") on node \"crc\" DevicePath \"\"" Dec 13 03:39:22 crc kubenswrapper[5070]: I1213 03:39:22.857508 5070 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a500beac-00af-444d-8ca8-52c3c7c0af60-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 13 03:39:22 crc kubenswrapper[5070]: I1213 03:39:22.857522 5070 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a500beac-00af-444d-8ca8-52c3c7c0af60-inventory\") on node \"crc\" DevicePath \"\"" Dec 13 03:39:23 crc kubenswrapper[5070]: I1213 03:39:23.036359 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-create-zl6vp"] Dec 13 03:39:23 crc kubenswrapper[5070]: I1213 03:39:23.050415 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-create-zl6vp"] Dec 13 03:39:23 crc kubenswrapper[5070]: I1213 03:39:23.063169 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-create-w4rjq"] Dec 13 03:39:23 crc kubenswrapper[5070]: I1213 03:39:23.071417 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-create-6pt28"] Dec 13 03:39:23 crc kubenswrapper[5070]: I1213 03:39:23.078966 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-create-w4rjq"] Dec 13 03:39:23 crc kubenswrapper[5070]: I1213 03:39:23.087169 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-create-6pt28"] Dec 13 03:39:23 crc kubenswrapper[5070]: I1213 03:39:23.251612 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-2g9ds" event={"ID":"a500beac-00af-444d-8ca8-52c3c7c0af60","Type":"ContainerDied","Data":"77379b50722d00f98b13c0c16ca84b52349b656a35e155c17b5fdc30dec5bcb8"} Dec 13 03:39:23 crc kubenswrapper[5070]: I1213 03:39:23.251657 5070 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="77379b50722d00f98b13c0c16ca84b52349b656a35e155c17b5fdc30dec5bcb8" Dec 13 03:39:23 crc kubenswrapper[5070]: I1213 03:39:23.251729 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-2g9ds" Dec 13 03:39:23 crc kubenswrapper[5070]: I1213 03:39:23.316011 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-z8z45"] Dec 13 03:39:23 crc kubenswrapper[5070]: E1213 03:39:23.316843 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a500beac-00af-444d-8ca8-52c3c7c0af60" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Dec 13 03:39:23 crc kubenswrapper[5070]: I1213 03:39:23.316863 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="a500beac-00af-444d-8ca8-52c3c7c0af60" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Dec 13 03:39:23 crc kubenswrapper[5070]: I1213 03:39:23.317114 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="a500beac-00af-444d-8ca8-52c3c7c0af60" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Dec 13 03:39:23 crc kubenswrapper[5070]: I1213 03:39:23.317940 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-z8z45" Dec 13 03:39:23 crc kubenswrapper[5070]: I1213 03:39:23.322291 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 13 03:39:23 crc kubenswrapper[5070]: I1213 03:39:23.322596 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-gdkcc" Dec 13 03:39:23 crc kubenswrapper[5070]: I1213 03:39:23.323030 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 13 03:39:23 crc kubenswrapper[5070]: I1213 03:39:23.325403 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 13 03:39:23 crc kubenswrapper[5070]: I1213 03:39:23.326783 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-z8z45"] Dec 13 03:39:23 crc kubenswrapper[5070]: I1213 03:39:23.365158 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qq5gf\" (UniqueName: \"kubernetes.io/projected/ff5ad124-4227-43d1-ac79-458743c916f1-kube-api-access-qq5gf\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-z8z45\" (UID: \"ff5ad124-4227-43d1-ac79-458743c916f1\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-z8z45" Dec 13 03:39:23 crc kubenswrapper[5070]: I1213 03:39:23.365237 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ff5ad124-4227-43d1-ac79-458743c916f1-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-z8z45\" (UID: \"ff5ad124-4227-43d1-ac79-458743c916f1\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-z8z45" Dec 13 03:39:23 crc kubenswrapper[5070]: I1213 03:39:23.365331 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ff5ad124-4227-43d1-ac79-458743c916f1-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-z8z45\" (UID: \"ff5ad124-4227-43d1-ac79-458743c916f1\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-z8z45" Dec 13 03:39:23 crc kubenswrapper[5070]: I1213 03:39:23.466678 5070 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ff5ad124-4227-43d1-ac79-458743c916f1-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-z8z45\" (UID: \"ff5ad124-4227-43d1-ac79-458743c916f1\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-z8z45" Dec 13 03:39:23 crc kubenswrapper[5070]: I1213 03:39:23.466804 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ff5ad124-4227-43d1-ac79-458743c916f1-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-z8z45\" (UID: \"ff5ad124-4227-43d1-ac79-458743c916f1\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-z8z45" Dec 13 03:39:23 crc kubenswrapper[5070]: I1213 03:39:23.467013 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qq5gf\" (UniqueName: \"kubernetes.io/projected/ff5ad124-4227-43d1-ac79-458743c916f1-kube-api-access-qq5gf\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-z8z45\" (UID: \"ff5ad124-4227-43d1-ac79-458743c916f1\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-z8z45" Dec 13 03:39:23 crc kubenswrapper[5070]: I1213 03:39:23.471980 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ff5ad124-4227-43d1-ac79-458743c916f1-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-z8z45\" (UID: \"ff5ad124-4227-43d1-ac79-458743c916f1\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-z8z45" Dec 13 03:39:23 crc kubenswrapper[5070]: I1213 03:39:23.472009 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ff5ad124-4227-43d1-ac79-458743c916f1-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-z8z45\" (UID: \"ff5ad124-4227-43d1-ac79-458743c916f1\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-z8z45" Dec 13 03:39:23 crc kubenswrapper[5070]: I1213 03:39:23.483193 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qq5gf\" (UniqueName: \"kubernetes.io/projected/ff5ad124-4227-43d1-ac79-458743c916f1-kube-api-access-qq5gf\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-z8z45\" (UID: \"ff5ad124-4227-43d1-ac79-458743c916f1\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-z8z45" Dec 13 03:39:23 crc kubenswrapper[5070]: I1213 03:39:23.652016 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-z8z45" Dec 13 03:39:23 crc kubenswrapper[5070]: I1213 03:39:23.759990 5070 scope.go:117] "RemoveContainer" containerID="e0fe988d1c0caf283d5376d630dfaa9a0b90a36bff6746aa3bc3248496519874" Dec 13 03:39:23 crc kubenswrapper[5070]: I1213 03:39:23.794774 5070 scope.go:117] "RemoveContainer" containerID="8f2d54109003d5c0234f52e6a00aaa806946325be3d82cb61dc337f285d6dff6" Dec 13 03:39:23 crc kubenswrapper[5070]: I1213 03:39:23.859581 5070 scope.go:117] "RemoveContainer" containerID="23812a8e52b8e978b001ee1bbff5c9a499552de5b5ef7b86f3800092d9125a28" Dec 13 03:39:24 crc kubenswrapper[5070]: I1213 03:39:23.886861 5070 scope.go:117] "RemoveContainer" containerID="3517bde0e941ace8c1b85b02564c0bed0194614f4b28d4c3e2f1dde8afe4f9ab" Dec 13 03:39:24 crc kubenswrapper[5070]: I1213 03:39:23.912333 5070 scope.go:117] "RemoveContainer" containerID="8a9c65ec4c5124014f4017580275eb5b077ce044324f4ad9d4abb648b5403875" Dec 13 03:39:24 crc kubenswrapper[5070]: I1213 03:39:23.939281 5070 scope.go:117] "RemoveContainer" containerID="7e6e604e38bcde0bd3acce10218073e9f4f08f51f8e71dffa76e6cf73de8f1ec" Dec 13 03:39:24 crc kubenswrapper[5070]: I1213 03:39:24.182884 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9484e5db-7a78-4875-adfc-274639497868" path="/var/lib/kubelet/pods/9484e5db-7a78-4875-adfc-274639497868/volumes" Dec 13 03:39:24 crc kubenswrapper[5070]: I1213 03:39:24.183516 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b7cf4d27-18ae-4936-b68c-0c0bc779e5ba" path="/var/lib/kubelet/pods/b7cf4d27-18ae-4936-b68c-0c0bc779e5ba/volumes" Dec 13 03:39:24 crc kubenswrapper[5070]: I1213 03:39:24.184038 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd5e6065-7d41-43e2-974f-820e716a8f73" path="/var/lib/kubelet/pods/cd5e6065-7d41-43e2-974f-820e716a8f73/volumes" Dec 13 03:39:24 crc kubenswrapper[5070]: I1213 03:39:24.210602 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-z8z45"] Dec 13 03:39:24 crc kubenswrapper[5070]: I1213 03:39:24.260286 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-z8z45" event={"ID":"ff5ad124-4227-43d1-ac79-458743c916f1","Type":"ContainerStarted","Data":"688189d44c4284e3786c960f6321713d2fe6cc8a46e50487ecc2b6d5bdacf8d6"} Dec 13 03:39:25 crc kubenswrapper[5070]: I1213 03:39:25.271829 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-z8z45" event={"ID":"ff5ad124-4227-43d1-ac79-458743c916f1","Type":"ContainerStarted","Data":"8e505250cf00c718e1776e33659e3e72de6e4d29457840c373af295e0ad5cf7f"} Dec 13 03:39:28 crc kubenswrapper[5070]: I1213 03:39:28.041284 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-z8z45" podStartSLOduration=4.516527288 podStartE2EDuration="5.04126278s" podCreationTimestamp="2025-12-13 03:39:23 +0000 UTC" firstStartedPulling="2025-12-13 03:39:24.214199281 +0000 UTC m=+1656.450042827" lastFinishedPulling="2025-12-13 03:39:24.738934763 +0000 UTC m=+1656.974778319" observedRunningTime="2025-12-13 03:39:25.297551841 +0000 UTC m=+1657.533395387" watchObservedRunningTime="2025-12-13 03:39:28.04126278 +0000 UTC m=+1660.277106326" Dec 13 03:39:28 crc kubenswrapper[5070]: I1213 03:39:28.044482 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack/keystone-db-sync-gxzdc"] Dec 13 03:39:28 crc kubenswrapper[5070]: I1213 03:39:28.054494 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-sync-gxzdc"] Dec 13 03:39:28 crc kubenswrapper[5070]: I1213 03:39:28.181985 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5a821721-8311-49af-872d-7ea63f878c61" path="/var/lib/kubelet/pods/5a821721-8311-49af-872d-7ea63f878c61/volumes" Dec 13 03:39:40 crc kubenswrapper[5070]: I1213 03:39:40.037069 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-sync-vv4wt"] Dec 13 03:39:40 crc kubenswrapper[5070]: I1213 03:39:40.046584 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-sync-vv4wt"] Dec 13 03:39:40 crc kubenswrapper[5070]: I1213 03:39:40.180030 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="682caa0d-3a80-41aa-a899-8613f8454481" path="/var/lib/kubelet/pods/682caa0d-3a80-41aa-a899-8613f8454481/volumes" Dec 13 03:39:44 crc kubenswrapper[5070]: I1213 03:39:44.031208 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-1e18-account-create-f4zc5"] Dec 13 03:39:44 crc kubenswrapper[5070]: I1213 03:39:44.041452 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-1e18-account-create-f4zc5"] Dec 13 03:39:44 crc kubenswrapper[5070]: I1213 03:39:44.051264 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-c1b6-account-create-gls4t"] Dec 13 03:39:44 crc kubenswrapper[5070]: I1213 03:39:44.059663 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-c1b6-account-create-gls4t"] Dec 13 03:39:44 crc kubenswrapper[5070]: I1213 03:39:44.067491 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-sync-z9bn5"] Dec 13 03:39:44 crc kubenswrapper[5070]: I1213 03:39:44.073817 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-d5b9-account-create-8mxbm"] Dec 13 03:39:44 crc kubenswrapper[5070]: I1213 03:39:44.080118 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-sync-z9bn5"] Dec 13 03:39:44 crc kubenswrapper[5070]: I1213 03:39:44.086260 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-d5b9-account-create-8mxbm"] Dec 13 03:39:44 crc kubenswrapper[5070]: I1213 03:39:44.183774 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="11bc40dc-12b2-46e3-ba04-85ff1fb6f8b5" path="/var/lib/kubelet/pods/11bc40dc-12b2-46e3-ba04-85ff1fb6f8b5/volumes" Dec 13 03:39:44 crc kubenswrapper[5070]: I1213 03:39:44.184749 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7b38e233-37d8-48ae-8e8e-56e97ec0962c" path="/var/lib/kubelet/pods/7b38e233-37d8-48ae-8e8e-56e97ec0962c/volumes" Dec 13 03:39:44 crc kubenswrapper[5070]: I1213 03:39:44.185520 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9abbaf32-db3e-4169-8159-6d707cdf3e2a" path="/var/lib/kubelet/pods/9abbaf32-db3e-4169-8159-6d707cdf3e2a/volumes" Dec 13 03:39:44 crc kubenswrapper[5070]: I1213 03:39:44.187184 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4d4410c-3496-483f-9ce2-7e181589ada3" path="/var/lib/kubelet/pods/f4d4410c-3496-483f-9ce2-7e181589ada3/volumes" Dec 13 03:39:51 crc kubenswrapper[5070]: I1213 03:39:51.946952 5070 patch_prober.go:28] interesting pod/machine-config-daemon-9l4rb container/machine-config-daemon namespace/openshift-machine-config-operator: 
Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 13 03:39:51 crc kubenswrapper[5070]: I1213 03:39:51.947601 5070 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 13 03:39:57 crc kubenswrapper[5070]: I1213 03:39:57.040909 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-4v7hg"] Dec 13 03:39:57 crc kubenswrapper[5070]: I1213 03:39:57.048831 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-4v7hg"] Dec 13 03:39:58 crc kubenswrapper[5070]: I1213 03:39:58.183938 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="36830906-e994-4294-8cef-75a40f39ede4" path="/var/lib/kubelet/pods/36830906-e994-4294-8cef-75a40f39ede4/volumes" Dec 13 03:40:02 crc kubenswrapper[5070]: I1213 03:40:02.614536 5070 generic.go:334] "Generic (PLEG): container finished" podID="ff5ad124-4227-43d1-ac79-458743c916f1" containerID="8e505250cf00c718e1776e33659e3e72de6e4d29457840c373af295e0ad5cf7f" exitCode=0 Dec 13 03:40:02 crc kubenswrapper[5070]: I1213 03:40:02.614631 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-z8z45" event={"ID":"ff5ad124-4227-43d1-ac79-458743c916f1","Type":"ContainerDied","Data":"8e505250cf00c718e1776e33659e3e72de6e4d29457840c373af295e0ad5cf7f"} Dec 13 03:40:04 crc kubenswrapper[5070]: I1213 03:40:04.027684 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-z8z45" Dec 13 03:40:04 crc kubenswrapper[5070]: I1213 03:40:04.075621 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ff5ad124-4227-43d1-ac79-458743c916f1-ssh-key\") pod \"ff5ad124-4227-43d1-ac79-458743c916f1\" (UID: \"ff5ad124-4227-43d1-ac79-458743c916f1\") " Dec 13 03:40:04 crc kubenswrapper[5070]: I1213 03:40:04.075672 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ff5ad124-4227-43d1-ac79-458743c916f1-inventory\") pod \"ff5ad124-4227-43d1-ac79-458743c916f1\" (UID: \"ff5ad124-4227-43d1-ac79-458743c916f1\") " Dec 13 03:40:04 crc kubenswrapper[5070]: I1213 03:40:04.076083 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qq5gf\" (UniqueName: \"kubernetes.io/projected/ff5ad124-4227-43d1-ac79-458743c916f1-kube-api-access-qq5gf\") pod \"ff5ad124-4227-43d1-ac79-458743c916f1\" (UID: \"ff5ad124-4227-43d1-ac79-458743c916f1\") " Dec 13 03:40:04 crc kubenswrapper[5070]: I1213 03:40:04.081033 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ff5ad124-4227-43d1-ac79-458743c916f1-kube-api-access-qq5gf" (OuterVolumeSpecName: "kube-api-access-qq5gf") pod "ff5ad124-4227-43d1-ac79-458743c916f1" (UID: "ff5ad124-4227-43d1-ac79-458743c916f1"). InnerVolumeSpecName "kube-api-access-qq5gf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:40:04 crc kubenswrapper[5070]: I1213 03:40:04.107282 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ff5ad124-4227-43d1-ac79-458743c916f1-inventory" (OuterVolumeSpecName: "inventory") pod "ff5ad124-4227-43d1-ac79-458743c916f1" (UID: "ff5ad124-4227-43d1-ac79-458743c916f1"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:40:04 crc kubenswrapper[5070]: I1213 03:40:04.107308 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ff5ad124-4227-43d1-ac79-458743c916f1-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "ff5ad124-4227-43d1-ac79-458743c916f1" (UID: "ff5ad124-4227-43d1-ac79-458743c916f1"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:40:04 crc kubenswrapper[5070]: I1213 03:40:04.177747 5070 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ff5ad124-4227-43d1-ac79-458743c916f1-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 13 03:40:04 crc kubenswrapper[5070]: I1213 03:40:04.177806 5070 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ff5ad124-4227-43d1-ac79-458743c916f1-inventory\") on node \"crc\" DevicePath \"\"" Dec 13 03:40:04 crc kubenswrapper[5070]: I1213 03:40:04.177841 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qq5gf\" (UniqueName: \"kubernetes.io/projected/ff5ad124-4227-43d1-ac79-458743c916f1-kube-api-access-qq5gf\") on node \"crc\" DevicePath \"\"" Dec 13 03:40:04 crc kubenswrapper[5070]: I1213 03:40:04.634662 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-z8z45" event={"ID":"ff5ad124-4227-43d1-ac79-458743c916f1","Type":"ContainerDied","Data":"688189d44c4284e3786c960f6321713d2fe6cc8a46e50487ecc2b6d5bdacf8d6"} Dec 13 03:40:04 crc kubenswrapper[5070]: I1213 03:40:04.634982 5070 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="688189d44c4284e3786c960f6321713d2fe6cc8a46e50487ecc2b6d5bdacf8d6" Dec 13 03:40:04 crc kubenswrapper[5070]: I1213 03:40:04.634704 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-z8z45" Dec 13 03:40:04 crc kubenswrapper[5070]: I1213 03:40:04.719927 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-f8v8z"] Dec 13 03:40:04 crc kubenswrapper[5070]: E1213 03:40:04.720279 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ff5ad124-4227-43d1-ac79-458743c916f1" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Dec 13 03:40:04 crc kubenswrapper[5070]: I1213 03:40:04.720295 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="ff5ad124-4227-43d1-ac79-458743c916f1" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Dec 13 03:40:04 crc kubenswrapper[5070]: I1213 03:40:04.722910 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="ff5ad124-4227-43d1-ac79-458743c916f1" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Dec 13 03:40:04 crc kubenswrapper[5070]: I1213 03:40:04.723555 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-f8v8z" Dec 13 03:40:04 crc kubenswrapper[5070]: I1213 03:40:04.726182 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 13 03:40:04 crc kubenswrapper[5070]: I1213 03:40:04.726475 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-gdkcc" Dec 13 03:40:04 crc kubenswrapper[5070]: I1213 03:40:04.726620 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 13 03:40:04 crc kubenswrapper[5070]: I1213 03:40:04.726622 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 13 03:40:04 crc kubenswrapper[5070]: I1213 03:40:04.729774 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-f8v8z"] Dec 13 03:40:04 crc kubenswrapper[5070]: I1213 03:40:04.787213 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/73eb5f6f-c53f-40a7-950a-901fadf89183-inventory\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-f8v8z\" (UID: \"73eb5f6f-c53f-40a7-950a-901fadf89183\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-f8v8z" Dec 13 03:40:04 crc kubenswrapper[5070]: I1213 03:40:04.787616 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/73eb5f6f-c53f-40a7-950a-901fadf89183-ssh-key\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-f8v8z\" (UID: \"73eb5f6f-c53f-40a7-950a-901fadf89183\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-f8v8z" Dec 13 03:40:04 crc kubenswrapper[5070]: I1213 03:40:04.787733 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-whvfp\" (UniqueName: \"kubernetes.io/projected/73eb5f6f-c53f-40a7-950a-901fadf89183-kube-api-access-whvfp\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-f8v8z\" (UID: \"73eb5f6f-c53f-40a7-950a-901fadf89183\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-f8v8z" Dec 13 03:40:04 crc kubenswrapper[5070]: I1213 03:40:04.889501 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/73eb5f6f-c53f-40a7-950a-901fadf89183-inventory\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-f8v8z\" (UID: \"73eb5f6f-c53f-40a7-950a-901fadf89183\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-f8v8z" Dec 13 03:40:04 crc kubenswrapper[5070]: I1213 03:40:04.889607 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/73eb5f6f-c53f-40a7-950a-901fadf89183-ssh-key\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-f8v8z\" (UID: \"73eb5f6f-c53f-40a7-950a-901fadf89183\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-f8v8z" Dec 13 03:40:04 crc kubenswrapper[5070]: I1213 03:40:04.889655 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-whvfp\" (UniqueName: \"kubernetes.io/projected/73eb5f6f-c53f-40a7-950a-901fadf89183-kube-api-access-whvfp\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-f8v8z\" 
(UID: \"73eb5f6f-c53f-40a7-950a-901fadf89183\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-f8v8z" Dec 13 03:40:04 crc kubenswrapper[5070]: I1213 03:40:04.903542 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/73eb5f6f-c53f-40a7-950a-901fadf89183-ssh-key\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-f8v8z\" (UID: \"73eb5f6f-c53f-40a7-950a-901fadf89183\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-f8v8z" Dec 13 03:40:04 crc kubenswrapper[5070]: I1213 03:40:04.903918 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/73eb5f6f-c53f-40a7-950a-901fadf89183-inventory\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-f8v8z\" (UID: \"73eb5f6f-c53f-40a7-950a-901fadf89183\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-f8v8z" Dec 13 03:40:04 crc kubenswrapper[5070]: I1213 03:40:04.911973 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-whvfp\" (UniqueName: \"kubernetes.io/projected/73eb5f6f-c53f-40a7-950a-901fadf89183-kube-api-access-whvfp\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-f8v8z\" (UID: \"73eb5f6f-c53f-40a7-950a-901fadf89183\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-f8v8z" Dec 13 03:40:05 crc kubenswrapper[5070]: I1213 03:40:05.046040 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-f8v8z" Dec 13 03:40:05 crc kubenswrapper[5070]: I1213 03:40:05.630265 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-f8v8z"] Dec 13 03:40:05 crc kubenswrapper[5070]: I1213 03:40:05.644094 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-f8v8z" event={"ID":"73eb5f6f-c53f-40a7-950a-901fadf89183","Type":"ContainerStarted","Data":"a72d561005bb77f7db2cd68bbe1c65a35a99b252c40fa2e5bc286ddc2dfaf812"} Dec 13 03:40:06 crc kubenswrapper[5070]: I1213 03:40:06.654554 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-f8v8z" event={"ID":"73eb5f6f-c53f-40a7-950a-901fadf89183","Type":"ContainerStarted","Data":"c3f78ea96eb0e9d49c006258ffc53c0220c3c1ac315c01aca660a84df8235b2e"} Dec 13 03:40:06 crc kubenswrapper[5070]: I1213 03:40:06.671935 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-f8v8z" podStartSLOduration=1.9487294560000001 podStartE2EDuration="2.671918391s" podCreationTimestamp="2025-12-13 03:40:04 +0000 UTC" firstStartedPulling="2025-12-13 03:40:05.636839422 +0000 UTC m=+1697.872682968" lastFinishedPulling="2025-12-13 03:40:06.360028317 +0000 UTC m=+1698.595871903" observedRunningTime="2025-12-13 03:40:06.669751772 +0000 UTC m=+1698.905595318" watchObservedRunningTime="2025-12-13 03:40:06.671918391 +0000 UTC m=+1698.907761937" Dec 13 03:40:10 crc kubenswrapper[5070]: I1213 03:40:10.706100 5070 generic.go:334] "Generic (PLEG): container finished" podID="73eb5f6f-c53f-40a7-950a-901fadf89183" containerID="c3f78ea96eb0e9d49c006258ffc53c0220c3c1ac315c01aca660a84df8235b2e" exitCode=0 Dec 13 03:40:10 crc kubenswrapper[5070]: I1213 03:40:10.706163 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-f8v8z" event={"ID":"73eb5f6f-c53f-40a7-950a-901fadf89183","Type":"ContainerDied","Data":"c3f78ea96eb0e9d49c006258ffc53c0220c3c1ac315c01aca660a84df8235b2e"} Dec 13 03:40:12 crc kubenswrapper[5070]: I1213 03:40:12.152924 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-f8v8z" Dec 13 03:40:12 crc kubenswrapper[5070]: I1213 03:40:12.220229 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/73eb5f6f-c53f-40a7-950a-901fadf89183-inventory\") pod \"73eb5f6f-c53f-40a7-950a-901fadf89183\" (UID: \"73eb5f6f-c53f-40a7-950a-901fadf89183\") " Dec 13 03:40:12 crc kubenswrapper[5070]: I1213 03:40:12.220328 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/73eb5f6f-c53f-40a7-950a-901fadf89183-ssh-key\") pod \"73eb5f6f-c53f-40a7-950a-901fadf89183\" (UID: \"73eb5f6f-c53f-40a7-950a-901fadf89183\") " Dec 13 03:40:12 crc kubenswrapper[5070]: I1213 03:40:12.220558 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-whvfp\" (UniqueName: \"kubernetes.io/projected/73eb5f6f-c53f-40a7-950a-901fadf89183-kube-api-access-whvfp\") pod \"73eb5f6f-c53f-40a7-950a-901fadf89183\" (UID: \"73eb5f6f-c53f-40a7-950a-901fadf89183\") " Dec 13 03:40:12 crc kubenswrapper[5070]: I1213 03:40:12.227703 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/73eb5f6f-c53f-40a7-950a-901fadf89183-kube-api-access-whvfp" (OuterVolumeSpecName: "kube-api-access-whvfp") pod "73eb5f6f-c53f-40a7-950a-901fadf89183" (UID: "73eb5f6f-c53f-40a7-950a-901fadf89183"). InnerVolumeSpecName "kube-api-access-whvfp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:40:12 crc kubenswrapper[5070]: I1213 03:40:12.246498 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/73eb5f6f-c53f-40a7-950a-901fadf89183-inventory" (OuterVolumeSpecName: "inventory") pod "73eb5f6f-c53f-40a7-950a-901fadf89183" (UID: "73eb5f6f-c53f-40a7-950a-901fadf89183"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:40:12 crc kubenswrapper[5070]: I1213 03:40:12.257208 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/73eb5f6f-c53f-40a7-950a-901fadf89183-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "73eb5f6f-c53f-40a7-950a-901fadf89183" (UID: "73eb5f6f-c53f-40a7-950a-901fadf89183"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:40:12 crc kubenswrapper[5070]: I1213 03:40:12.323560 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-whvfp\" (UniqueName: \"kubernetes.io/projected/73eb5f6f-c53f-40a7-950a-901fadf89183-kube-api-access-whvfp\") on node \"crc\" DevicePath \"\"" Dec 13 03:40:12 crc kubenswrapper[5070]: I1213 03:40:12.323610 5070 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/73eb5f6f-c53f-40a7-950a-901fadf89183-inventory\") on node \"crc\" DevicePath \"\"" Dec 13 03:40:12 crc kubenswrapper[5070]: I1213 03:40:12.323687 5070 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/73eb5f6f-c53f-40a7-950a-901fadf89183-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 13 03:40:12 crc kubenswrapper[5070]: I1213 03:40:12.758006 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-f8v8z" event={"ID":"73eb5f6f-c53f-40a7-950a-901fadf89183","Type":"ContainerDied","Data":"a72d561005bb77f7db2cd68bbe1c65a35a99b252c40fa2e5bc286ddc2dfaf812"} Dec 13 03:40:12 crc kubenswrapper[5070]: I1213 03:40:12.758208 5070 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a72d561005bb77f7db2cd68bbe1c65a35a99b252c40fa2e5bc286ddc2dfaf812" Dec 13 03:40:12 crc kubenswrapper[5070]: I1213 03:40:12.758096 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-f8v8z" Dec 13 03:40:12 crc kubenswrapper[5070]: I1213 03:40:12.792584 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-2lj4m"] Dec 13 03:40:12 crc kubenswrapper[5070]: E1213 03:40:12.792984 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="73eb5f6f-c53f-40a7-950a-901fadf89183" containerName="ceph-hci-pre-edpm-deployment-openstack-edpm-ipam" Dec 13 03:40:12 crc kubenswrapper[5070]: I1213 03:40:12.792998 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="73eb5f6f-c53f-40a7-950a-901fadf89183" containerName="ceph-hci-pre-edpm-deployment-openstack-edpm-ipam" Dec 13 03:40:12 crc kubenswrapper[5070]: I1213 03:40:12.793214 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="73eb5f6f-c53f-40a7-950a-901fadf89183" containerName="ceph-hci-pre-edpm-deployment-openstack-edpm-ipam" Dec 13 03:40:12 crc kubenswrapper[5070]: I1213 03:40:12.793882 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-2lj4m" Dec 13 03:40:12 crc kubenswrapper[5070]: I1213 03:40:12.799921 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 13 03:40:12 crc kubenswrapper[5070]: I1213 03:40:12.799962 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-gdkcc" Dec 13 03:40:12 crc kubenswrapper[5070]: I1213 03:40:12.800153 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 13 03:40:12 crc kubenswrapper[5070]: I1213 03:40:12.800631 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 13 03:40:12 crc kubenswrapper[5070]: I1213 03:40:12.806695 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-2lj4m"] Dec 13 03:40:12 crc kubenswrapper[5070]: I1213 03:40:12.838809 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vm285\" (UniqueName: \"kubernetes.io/projected/06c38db1-1673-4e3f-b4b7-50277a407e82-kube-api-access-vm285\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-2lj4m\" (UID: \"06c38db1-1673-4e3f-b4b7-50277a407e82\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-2lj4m" Dec 13 03:40:12 crc kubenswrapper[5070]: I1213 03:40:12.838920 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/06c38db1-1673-4e3f-b4b7-50277a407e82-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-2lj4m\" (UID: \"06c38db1-1673-4e3f-b4b7-50277a407e82\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-2lj4m" Dec 13 03:40:12 crc kubenswrapper[5070]: I1213 03:40:12.839164 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/06c38db1-1673-4e3f-b4b7-50277a407e82-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-2lj4m\" (UID: \"06c38db1-1673-4e3f-b4b7-50277a407e82\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-2lj4m" Dec 13 03:40:12 crc kubenswrapper[5070]: I1213 03:40:12.940782 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/06c38db1-1673-4e3f-b4b7-50277a407e82-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-2lj4m\" (UID: \"06c38db1-1673-4e3f-b4b7-50277a407e82\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-2lj4m" Dec 13 03:40:12 crc kubenswrapper[5070]: I1213 03:40:12.940861 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/06c38db1-1673-4e3f-b4b7-50277a407e82-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-2lj4m\" (UID: \"06c38db1-1673-4e3f-b4b7-50277a407e82\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-2lj4m" Dec 13 03:40:12 crc kubenswrapper[5070]: I1213 03:40:12.940932 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vm285\" (UniqueName: \"kubernetes.io/projected/06c38db1-1673-4e3f-b4b7-50277a407e82-kube-api-access-vm285\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-2lj4m\" 
(UID: \"06c38db1-1673-4e3f-b4b7-50277a407e82\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-2lj4m" Dec 13 03:40:12 crc kubenswrapper[5070]: I1213 03:40:12.945725 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/06c38db1-1673-4e3f-b4b7-50277a407e82-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-2lj4m\" (UID: \"06c38db1-1673-4e3f-b4b7-50277a407e82\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-2lj4m" Dec 13 03:40:12 crc kubenswrapper[5070]: I1213 03:40:12.948949 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/06c38db1-1673-4e3f-b4b7-50277a407e82-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-2lj4m\" (UID: \"06c38db1-1673-4e3f-b4b7-50277a407e82\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-2lj4m" Dec 13 03:40:12 crc kubenswrapper[5070]: I1213 03:40:12.969210 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vm285\" (UniqueName: \"kubernetes.io/projected/06c38db1-1673-4e3f-b4b7-50277a407e82-kube-api-access-vm285\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-2lj4m\" (UID: \"06c38db1-1673-4e3f-b4b7-50277a407e82\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-2lj4m" Dec 13 03:40:13 crc kubenswrapper[5070]: I1213 03:40:13.117307 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-2lj4m" Dec 13 03:40:13 crc kubenswrapper[5070]: I1213 03:40:13.629526 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-2lj4m"] Dec 13 03:40:13 crc kubenswrapper[5070]: I1213 03:40:13.767703 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-2lj4m" event={"ID":"06c38db1-1673-4e3f-b4b7-50277a407e82","Type":"ContainerStarted","Data":"7485d58d37725b942fb9c36f1187488e61bab15036000195ed8162d6be77ec06"} Dec 13 03:40:14 crc kubenswrapper[5070]: I1213 03:40:14.780862 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-2lj4m" event={"ID":"06c38db1-1673-4e3f-b4b7-50277a407e82","Type":"ContainerStarted","Data":"bf5459e3cf5fb32afbb32b09315608abc089b4d1911d45685ec44fa39b82af15"} Dec 13 03:40:14 crc kubenswrapper[5070]: I1213 03:40:14.802393 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-2lj4m" podStartSLOduration=2.094533371 podStartE2EDuration="2.802371238s" podCreationTimestamp="2025-12-13 03:40:12 +0000 UTC" firstStartedPulling="2025-12-13 03:40:13.642094215 +0000 UTC m=+1705.877937771" lastFinishedPulling="2025-12-13 03:40:14.349932082 +0000 UTC m=+1706.585775638" observedRunningTime="2025-12-13 03:40:14.796865157 +0000 UTC m=+1707.032708703" watchObservedRunningTime="2025-12-13 03:40:14.802371238 +0000 UTC m=+1707.038214784" Dec 13 03:40:21 crc kubenswrapper[5070]: I1213 03:40:21.943764 5070 patch_prober.go:28] interesting pod/machine-config-daemon-9l4rb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 13 03:40:21 crc kubenswrapper[5070]: I1213 03:40:21.944417 
5070 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 13 03:40:21 crc kubenswrapper[5070]: I1213 03:40:21.944502 5070 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" Dec 13 03:40:21 crc kubenswrapper[5070]: I1213 03:40:21.945331 5070 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"868a59edbd4e377cf977ca832f6b9371f5846d986a6bbcacfbabf30c647e78a8"} pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 13 03:40:21 crc kubenswrapper[5070]: I1213 03:40:21.945399 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" containerName="machine-config-daemon" containerID="cri-o://868a59edbd4e377cf977ca832f6b9371f5846d986a6bbcacfbabf30c647e78a8" gracePeriod=600 Dec 13 03:40:22 crc kubenswrapper[5070]: E1213 03:40:22.144040 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 03:40:22 crc kubenswrapper[5070]: I1213 03:40:22.858671 5070 generic.go:334] "Generic (PLEG): container finished" podID="a2e447c7-5901-414f-af96-69441d4750db" containerID="868a59edbd4e377cf977ca832f6b9371f5846d986a6bbcacfbabf30c647e78a8" exitCode=0 Dec 13 03:40:22 crc kubenswrapper[5070]: I1213 03:40:22.858721 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" event={"ID":"a2e447c7-5901-414f-af96-69441d4750db","Type":"ContainerDied","Data":"868a59edbd4e377cf977ca832f6b9371f5846d986a6bbcacfbabf30c647e78a8"} Dec 13 03:40:22 crc kubenswrapper[5070]: I1213 03:40:22.859006 5070 scope.go:117] "RemoveContainer" containerID="cca15ae9a649254597cf251cc63ec7ab70879a87c44cea845775d4020af3f28d" Dec 13 03:40:22 crc kubenswrapper[5070]: I1213 03:40:22.859697 5070 scope.go:117] "RemoveContainer" containerID="868a59edbd4e377cf977ca832f6b9371f5846d986a6bbcacfbabf30c647e78a8" Dec 13 03:40:22 crc kubenswrapper[5070]: E1213 03:40:22.860055 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 03:40:24 crc kubenswrapper[5070]: I1213 03:40:24.056644 5070 scope.go:117] "RemoveContainer" containerID="e3ad13f3619e15f567bee08b1d191b5222f8a301d2ba0d9b45caacdfa78a7e65" Dec 13 03:40:24 crc kubenswrapper[5070]: I1213 03:40:24.085256 5070 
scope.go:117] "RemoveContainer" containerID="eb464549852bbc83dc78dc390598abeb11fa6b356fd094ad257ca2dd66bae968" Dec 13 03:40:24 crc kubenswrapper[5070]: I1213 03:40:24.133634 5070 scope.go:117] "RemoveContainer" containerID="659801e4f290adc5edaa99e0e84ea4264d3e59db2955b93e6cf5f26c7fadaecf" Dec 13 03:40:24 crc kubenswrapper[5070]: I1213 03:40:24.188238 5070 scope.go:117] "RemoveContainer" containerID="f81f4c1d7b48763ec0adc33440d0506c7adc94587a7453141e397e59e4281c47" Dec 13 03:40:24 crc kubenswrapper[5070]: I1213 03:40:24.212233 5070 scope.go:117] "RemoveContainer" containerID="df723113fedaec855e5a76652062c5c6b1cd858ce93b298cf3e007fb42a9a0fb" Dec 13 03:40:24 crc kubenswrapper[5070]: I1213 03:40:24.265644 5070 scope.go:117] "RemoveContainer" containerID="9b3398d72e77a925b9d06709f760dad67e60bf84e008f913e08077f320815422" Dec 13 03:40:24 crc kubenswrapper[5070]: I1213 03:40:24.289015 5070 scope.go:117] "RemoveContainer" containerID="d964632d8a44a86f34225388cf8000e62afaa95a2549cd9d322698a1817e9987" Dec 13 03:40:24 crc kubenswrapper[5070]: I1213 03:40:24.323170 5070 scope.go:117] "RemoveContainer" containerID="d56059de180814feaec0d4a697e973a90e612d92e0cda462e3c56d3ab55aa883" Dec 13 03:40:24 crc kubenswrapper[5070]: I1213 03:40:24.372381 5070 scope.go:117] "RemoveContainer" containerID="1368fd0d5b1bb1fd7edd55f9cc013293d39694fefbc7d2f11c297555ad6dee55" Dec 13 03:40:24 crc kubenswrapper[5070]: I1213 03:40:24.398107 5070 scope.go:117] "RemoveContainer" containerID="6c655b199cd92d3e80f20e037439480eca6807c7d128932bc536906a328be95e" Dec 13 03:40:29 crc kubenswrapper[5070]: I1213 03:40:29.055418 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-sync-vtvhk"] Dec 13 03:40:29 crc kubenswrapper[5070]: I1213 03:40:29.068684 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-sync-vtvhk"] Dec 13 03:40:30 crc kubenswrapper[5070]: I1213 03:40:30.180374 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e3c77f64-0733-4569-b3d9-f112d9e1d02b" path="/var/lib/kubelet/pods/e3c77f64-0733-4569-b3d9-f112d9e1d02b/volumes" Dec 13 03:40:33 crc kubenswrapper[5070]: I1213 03:40:33.167616 5070 scope.go:117] "RemoveContainer" containerID="868a59edbd4e377cf977ca832f6b9371f5846d986a6bbcacfbabf30c647e78a8" Dec 13 03:40:33 crc kubenswrapper[5070]: E1213 03:40:33.168952 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 03:40:42 crc kubenswrapper[5070]: I1213 03:40:42.035389 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-sync-csqcx"] Dec 13 03:40:42 crc kubenswrapper[5070]: I1213 03:40:42.044497 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-sync-csqcx"] Dec 13 03:40:42 crc kubenswrapper[5070]: I1213 03:40:42.177482 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="759f7e37-a2f6-4a1b-a220-24397c94b928" path="/var/lib/kubelet/pods/759f7e37-a2f6-4a1b-a220-24397c94b928/volumes" Dec 13 03:40:46 crc kubenswrapper[5070]: I1213 03:40:46.025289 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-db-create-mp95c"] Dec 13 03:40:46 crc kubenswrapper[5070]: I1213 
03:40:46.035542 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-db-create-g6ft6"] Dec 13 03:40:46 crc kubenswrapper[5070]: I1213 03:40:46.047701 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-db-create-g6ft6"] Dec 13 03:40:46 crc kubenswrapper[5070]: I1213 03:40:46.055696 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-db-create-mp95c"] Dec 13 03:40:46 crc kubenswrapper[5070]: I1213 03:40:46.178668 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="092833e5-159b-4c12-8c86-e36d41a1736e" path="/var/lib/kubelet/pods/092833e5-159b-4c12-8c86-e36d41a1736e/volumes" Dec 13 03:40:46 crc kubenswrapper[5070]: I1213 03:40:46.179242 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2a201584-81d1-424e-97e0-1b76842274aa" path="/var/lib/kubelet/pods/2a201584-81d1-424e-97e0-1b76842274aa/volumes" Dec 13 03:40:47 crc kubenswrapper[5070]: I1213 03:40:47.029936 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-db-create-jhj27"] Dec 13 03:40:47 crc kubenswrapper[5070]: I1213 03:40:47.037402 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-db-create-jhj27"] Dec 13 03:40:47 crc kubenswrapper[5070]: I1213 03:40:47.166955 5070 scope.go:117] "RemoveContainer" containerID="868a59edbd4e377cf977ca832f6b9371f5846d986a6bbcacfbabf30c647e78a8" Dec 13 03:40:47 crc kubenswrapper[5070]: E1213 03:40:47.167225 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 03:40:48 crc kubenswrapper[5070]: I1213 03:40:48.183336 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="79694a00-eed5-4e8f-8cff-bf85dced4d4a" path="/var/lib/kubelet/pods/79694a00-eed5-4e8f-8cff-bf85dced4d4a/volumes" Dec 13 03:40:50 crc kubenswrapper[5070]: I1213 03:40:50.037159 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-sync-n7ll5"] Dec 13 03:40:50 crc kubenswrapper[5070]: I1213 03:40:50.046315 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-sync-n7ll5"] Dec 13 03:40:50 crc kubenswrapper[5070]: I1213 03:40:50.179039 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="64c718c6-de73-4e08-9506-dce5dc9ebffd" path="/var/lib/kubelet/pods/64c718c6-de73-4e08-9506-dce5dc9ebffd/volumes" Dec 13 03:40:57 crc kubenswrapper[5070]: I1213 03:40:57.042691 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-aa68-account-create-77x5t"] Dec 13 03:40:57 crc kubenswrapper[5070]: I1213 03:40:57.051831 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-aa68-account-create-77x5t"] Dec 13 03:40:58 crc kubenswrapper[5070]: I1213 03:40:58.183327 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="deabd048-0a16-41dc-870a-bd5c5594b612" path="/var/lib/kubelet/pods/deabd048-0a16-41dc-870a-bd5c5594b612/volumes" Dec 13 03:40:59 crc kubenswrapper[5070]: I1213 03:40:59.168785 5070 scope.go:117] "RemoveContainer" containerID="868a59edbd4e377cf977ca832f6b9371f5846d986a6bbcacfbabf30c647e78a8" Dec 13 03:40:59 crc 
kubenswrapper[5070]: E1213 03:40:59.169239 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 03:41:02 crc kubenswrapper[5070]: I1213 03:41:02.212202 5070 generic.go:334] "Generic (PLEG): container finished" podID="06c38db1-1673-4e3f-b4b7-50277a407e82" containerID="bf5459e3cf5fb32afbb32b09315608abc089b4d1911d45685ec44fa39b82af15" exitCode=0 Dec 13 03:41:02 crc kubenswrapper[5070]: I1213 03:41:02.212287 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-2lj4m" event={"ID":"06c38db1-1673-4e3f-b4b7-50277a407e82","Type":"ContainerDied","Data":"bf5459e3cf5fb32afbb32b09315608abc089b4d1911d45685ec44fa39b82af15"} Dec 13 03:41:03 crc kubenswrapper[5070]: I1213 03:41:03.637958 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-2lj4m" Dec 13 03:41:03 crc kubenswrapper[5070]: I1213 03:41:03.757653 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/06c38db1-1673-4e3f-b4b7-50277a407e82-ssh-key\") pod \"06c38db1-1673-4e3f-b4b7-50277a407e82\" (UID: \"06c38db1-1673-4e3f-b4b7-50277a407e82\") " Dec 13 03:41:03 crc kubenswrapper[5070]: I1213 03:41:03.757987 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/06c38db1-1673-4e3f-b4b7-50277a407e82-inventory\") pod \"06c38db1-1673-4e3f-b4b7-50277a407e82\" (UID: \"06c38db1-1673-4e3f-b4b7-50277a407e82\") " Dec 13 03:41:03 crc kubenswrapper[5070]: I1213 03:41:03.758104 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vm285\" (UniqueName: \"kubernetes.io/projected/06c38db1-1673-4e3f-b4b7-50277a407e82-kube-api-access-vm285\") pod \"06c38db1-1673-4e3f-b4b7-50277a407e82\" (UID: \"06c38db1-1673-4e3f-b4b7-50277a407e82\") " Dec 13 03:41:03 crc kubenswrapper[5070]: I1213 03:41:03.764887 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/06c38db1-1673-4e3f-b4b7-50277a407e82-kube-api-access-vm285" (OuterVolumeSpecName: "kube-api-access-vm285") pod "06c38db1-1673-4e3f-b4b7-50277a407e82" (UID: "06c38db1-1673-4e3f-b4b7-50277a407e82"). InnerVolumeSpecName "kube-api-access-vm285". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:41:03 crc kubenswrapper[5070]: I1213 03:41:03.785038 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/06c38db1-1673-4e3f-b4b7-50277a407e82-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "06c38db1-1673-4e3f-b4b7-50277a407e82" (UID: "06c38db1-1673-4e3f-b4b7-50277a407e82"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:41:03 crc kubenswrapper[5070]: I1213 03:41:03.788941 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/06c38db1-1673-4e3f-b4b7-50277a407e82-inventory" (OuterVolumeSpecName: "inventory") pod "06c38db1-1673-4e3f-b4b7-50277a407e82" (UID: "06c38db1-1673-4e3f-b4b7-50277a407e82"). 
InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:41:03 crc kubenswrapper[5070]: I1213 03:41:03.860968 5070 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/06c38db1-1673-4e3f-b4b7-50277a407e82-inventory\") on node \"crc\" DevicePath \"\"" Dec 13 03:41:03 crc kubenswrapper[5070]: I1213 03:41:03.861168 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vm285\" (UniqueName: \"kubernetes.io/projected/06c38db1-1673-4e3f-b4b7-50277a407e82-kube-api-access-vm285\") on node \"crc\" DevicePath \"\"" Dec 13 03:41:03 crc kubenswrapper[5070]: I1213 03:41:03.861222 5070 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/06c38db1-1673-4e3f-b4b7-50277a407e82-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 13 03:41:04 crc kubenswrapper[5070]: I1213 03:41:04.229712 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-2lj4m" event={"ID":"06c38db1-1673-4e3f-b4b7-50277a407e82","Type":"ContainerDied","Data":"7485d58d37725b942fb9c36f1187488e61bab15036000195ed8162d6be77ec06"} Dec 13 03:41:04 crc kubenswrapper[5070]: I1213 03:41:04.229933 5070 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7485d58d37725b942fb9c36f1187488e61bab15036000195ed8162d6be77ec06" Dec 13 03:41:04 crc kubenswrapper[5070]: I1213 03:41:04.229780 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-2lj4m" Dec 13 03:41:04 crc kubenswrapper[5070]: I1213 03:41:04.338033 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-t7hrk"] Dec 13 03:41:04 crc kubenswrapper[5070]: E1213 03:41:04.338545 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="06c38db1-1673-4e3f-b4b7-50277a407e82" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Dec 13 03:41:04 crc kubenswrapper[5070]: I1213 03:41:04.338568 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="06c38db1-1673-4e3f-b4b7-50277a407e82" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Dec 13 03:41:04 crc kubenswrapper[5070]: I1213 03:41:04.338828 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="06c38db1-1673-4e3f-b4b7-50277a407e82" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Dec 13 03:41:04 crc kubenswrapper[5070]: I1213 03:41:04.339657 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-t7hrk" Dec 13 03:41:04 crc kubenswrapper[5070]: I1213 03:41:04.341791 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 13 03:41:04 crc kubenswrapper[5070]: I1213 03:41:04.342208 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-gdkcc" Dec 13 03:41:04 crc kubenswrapper[5070]: I1213 03:41:04.346614 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 13 03:41:04 crc kubenswrapper[5070]: I1213 03:41:04.347174 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 13 03:41:04 crc kubenswrapper[5070]: I1213 03:41:04.352622 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-t7hrk"] Dec 13 03:41:04 crc kubenswrapper[5070]: I1213 03:41:04.372660 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qzrhq\" (UniqueName: \"kubernetes.io/projected/f3044217-1b50-4e01-8b17-cdb64d3a850e-kube-api-access-qzrhq\") pod \"ssh-known-hosts-edpm-deployment-t7hrk\" (UID: \"f3044217-1b50-4e01-8b17-cdb64d3a850e\") " pod="openstack/ssh-known-hosts-edpm-deployment-t7hrk" Dec 13 03:41:04 crc kubenswrapper[5070]: I1213 03:41:04.372712 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/f3044217-1b50-4e01-8b17-cdb64d3a850e-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-t7hrk\" (UID: \"f3044217-1b50-4e01-8b17-cdb64d3a850e\") " pod="openstack/ssh-known-hosts-edpm-deployment-t7hrk" Dec 13 03:41:04 crc kubenswrapper[5070]: I1213 03:41:04.372769 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/f3044217-1b50-4e01-8b17-cdb64d3a850e-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-t7hrk\" (UID: \"f3044217-1b50-4e01-8b17-cdb64d3a850e\") " pod="openstack/ssh-known-hosts-edpm-deployment-t7hrk" Dec 13 03:41:04 crc kubenswrapper[5070]: I1213 03:41:04.475697 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/f3044217-1b50-4e01-8b17-cdb64d3a850e-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-t7hrk\" (UID: \"f3044217-1b50-4e01-8b17-cdb64d3a850e\") " pod="openstack/ssh-known-hosts-edpm-deployment-t7hrk" Dec 13 03:41:04 crc kubenswrapper[5070]: I1213 03:41:04.476418 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qzrhq\" (UniqueName: \"kubernetes.io/projected/f3044217-1b50-4e01-8b17-cdb64d3a850e-kube-api-access-qzrhq\") pod \"ssh-known-hosts-edpm-deployment-t7hrk\" (UID: \"f3044217-1b50-4e01-8b17-cdb64d3a850e\") " pod="openstack/ssh-known-hosts-edpm-deployment-t7hrk" Dec 13 03:41:04 crc kubenswrapper[5070]: I1213 03:41:04.476476 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/f3044217-1b50-4e01-8b17-cdb64d3a850e-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-t7hrk\" (UID: \"f3044217-1b50-4e01-8b17-cdb64d3a850e\") " pod="openstack/ssh-known-hosts-edpm-deployment-t7hrk" Dec 13 03:41:04 crc 
kubenswrapper[5070]: I1213 03:41:04.481783 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/f3044217-1b50-4e01-8b17-cdb64d3a850e-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-t7hrk\" (UID: \"f3044217-1b50-4e01-8b17-cdb64d3a850e\") " pod="openstack/ssh-known-hosts-edpm-deployment-t7hrk" Dec 13 03:41:04 crc kubenswrapper[5070]: I1213 03:41:04.482688 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/f3044217-1b50-4e01-8b17-cdb64d3a850e-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-t7hrk\" (UID: \"f3044217-1b50-4e01-8b17-cdb64d3a850e\") " pod="openstack/ssh-known-hosts-edpm-deployment-t7hrk" Dec 13 03:41:04 crc kubenswrapper[5070]: I1213 03:41:04.498873 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qzrhq\" (UniqueName: \"kubernetes.io/projected/f3044217-1b50-4e01-8b17-cdb64d3a850e-kube-api-access-qzrhq\") pod \"ssh-known-hosts-edpm-deployment-t7hrk\" (UID: \"f3044217-1b50-4e01-8b17-cdb64d3a850e\") " pod="openstack/ssh-known-hosts-edpm-deployment-t7hrk" Dec 13 03:41:04 crc kubenswrapper[5070]: I1213 03:41:04.655147 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-t7hrk" Dec 13 03:41:05 crc kubenswrapper[5070]: I1213 03:41:05.184257 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-t7hrk"] Dec 13 03:41:05 crc kubenswrapper[5070]: I1213 03:41:05.238499 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-t7hrk" event={"ID":"f3044217-1b50-4e01-8b17-cdb64d3a850e","Type":"ContainerStarted","Data":"3f1bdcb1023a467471f7a40f798e13eb0c5dfc6430a3aba92ca4a0a6babc273a"} Dec 13 03:41:06 crc kubenswrapper[5070]: I1213 03:41:06.034221 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-e920-account-create-c57q5"] Dec 13 03:41:06 crc kubenswrapper[5070]: I1213 03:41:06.052516 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-0b75-account-create-f5fw5"] Dec 13 03:41:06 crc kubenswrapper[5070]: I1213 03:41:06.069420 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-e920-account-create-c57q5"] Dec 13 03:41:06 crc kubenswrapper[5070]: I1213 03:41:06.077026 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-0b75-account-create-f5fw5"] Dec 13 03:41:06 crc kubenswrapper[5070]: I1213 03:41:06.177863 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="528f5eba-2b8a-478c-a09f-6e97729d2b31" path="/var/lib/kubelet/pods/528f5eba-2b8a-478c-a09f-6e97729d2b31/volumes" Dec 13 03:41:06 crc kubenswrapper[5070]: I1213 03:41:06.178514 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d30e4476-3bb4-4ac3-b30f-67d2900ab0c1" path="/var/lib/kubelet/pods/d30e4476-3bb4-4ac3-b30f-67d2900ab0c1/volumes" Dec 13 03:41:07 crc kubenswrapper[5070]: I1213 03:41:07.256571 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-t7hrk" event={"ID":"f3044217-1b50-4e01-8b17-cdb64d3a850e","Type":"ContainerStarted","Data":"7b041109717aec09fcaf235f689783e0441fde6859e1394d741bd4952e5d7436"} Dec 13 03:41:07 crc kubenswrapper[5070]: I1213 03:41:07.275267 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack/ssh-known-hosts-edpm-deployment-t7hrk" podStartSLOduration=2.283433193 podStartE2EDuration="3.275252333s" podCreationTimestamp="2025-12-13 03:41:04 +0000 UTC" firstStartedPulling="2025-12-13 03:41:05.18894196 +0000 UTC m=+1757.424785506" lastFinishedPulling="2025-12-13 03:41:06.1807611 +0000 UTC m=+1758.416604646" observedRunningTime="2025-12-13 03:41:07.269381452 +0000 UTC m=+1759.505224998" watchObservedRunningTime="2025-12-13 03:41:07.275252333 +0000 UTC m=+1759.511095879" Dec 13 03:41:10 crc kubenswrapper[5070]: I1213 03:41:10.168001 5070 scope.go:117] "RemoveContainer" containerID="868a59edbd4e377cf977ca832f6b9371f5846d986a6bbcacfbabf30c647e78a8" Dec 13 03:41:10 crc kubenswrapper[5070]: E1213 03:41:10.169075 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 03:41:14 crc kubenswrapper[5070]: I1213 03:41:14.319156 5070 generic.go:334] "Generic (PLEG): container finished" podID="f3044217-1b50-4e01-8b17-cdb64d3a850e" containerID="7b041109717aec09fcaf235f689783e0441fde6859e1394d741bd4952e5d7436" exitCode=0 Dec 13 03:41:14 crc kubenswrapper[5070]: I1213 03:41:14.319246 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-t7hrk" event={"ID":"f3044217-1b50-4e01-8b17-cdb64d3a850e","Type":"ContainerDied","Data":"7b041109717aec09fcaf235f689783e0441fde6859e1394d741bd4952e5d7436"} Dec 13 03:41:15 crc kubenswrapper[5070]: I1213 03:41:15.720432 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-t7hrk" Dec 13 03:41:15 crc kubenswrapper[5070]: I1213 03:41:15.916249 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qzrhq\" (UniqueName: \"kubernetes.io/projected/f3044217-1b50-4e01-8b17-cdb64d3a850e-kube-api-access-qzrhq\") pod \"f3044217-1b50-4e01-8b17-cdb64d3a850e\" (UID: \"f3044217-1b50-4e01-8b17-cdb64d3a850e\") " Dec 13 03:41:15 crc kubenswrapper[5070]: I1213 03:41:15.916723 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/f3044217-1b50-4e01-8b17-cdb64d3a850e-ssh-key-openstack-edpm-ipam\") pod \"f3044217-1b50-4e01-8b17-cdb64d3a850e\" (UID: \"f3044217-1b50-4e01-8b17-cdb64d3a850e\") " Dec 13 03:41:15 crc kubenswrapper[5070]: I1213 03:41:15.916750 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/f3044217-1b50-4e01-8b17-cdb64d3a850e-inventory-0\") pod \"f3044217-1b50-4e01-8b17-cdb64d3a850e\" (UID: \"f3044217-1b50-4e01-8b17-cdb64d3a850e\") " Dec 13 03:41:15 crc kubenswrapper[5070]: I1213 03:41:15.921672 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f3044217-1b50-4e01-8b17-cdb64d3a850e-kube-api-access-qzrhq" (OuterVolumeSpecName: "kube-api-access-qzrhq") pod "f3044217-1b50-4e01-8b17-cdb64d3a850e" (UID: "f3044217-1b50-4e01-8b17-cdb64d3a850e"). InnerVolumeSpecName "kube-api-access-qzrhq". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:41:15 crc kubenswrapper[5070]: I1213 03:41:15.943154 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f3044217-1b50-4e01-8b17-cdb64d3a850e-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "f3044217-1b50-4e01-8b17-cdb64d3a850e" (UID: "f3044217-1b50-4e01-8b17-cdb64d3a850e"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:41:15 crc kubenswrapper[5070]: I1213 03:41:15.947163 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f3044217-1b50-4e01-8b17-cdb64d3a850e-inventory-0" (OuterVolumeSpecName: "inventory-0") pod "f3044217-1b50-4e01-8b17-cdb64d3a850e" (UID: "f3044217-1b50-4e01-8b17-cdb64d3a850e"). InnerVolumeSpecName "inventory-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:41:16 crc kubenswrapper[5070]: I1213 03:41:16.019598 5070 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/f3044217-1b50-4e01-8b17-cdb64d3a850e-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Dec 13 03:41:16 crc kubenswrapper[5070]: I1213 03:41:16.019632 5070 reconciler_common.go:293] "Volume detached for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/f3044217-1b50-4e01-8b17-cdb64d3a850e-inventory-0\") on node \"crc\" DevicePath \"\"" Dec 13 03:41:16 crc kubenswrapper[5070]: I1213 03:41:16.019642 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qzrhq\" (UniqueName: \"kubernetes.io/projected/f3044217-1b50-4e01-8b17-cdb64d3a850e-kube-api-access-qzrhq\") on node \"crc\" DevicePath \"\"" Dec 13 03:41:16 crc kubenswrapper[5070]: I1213 03:41:16.339404 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-t7hrk" event={"ID":"f3044217-1b50-4e01-8b17-cdb64d3a850e","Type":"ContainerDied","Data":"3f1bdcb1023a467471f7a40f798e13eb0c5dfc6430a3aba92ca4a0a6babc273a"} Dec 13 03:41:16 crc kubenswrapper[5070]: I1213 03:41:16.339806 5070 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3f1bdcb1023a467471f7a40f798e13eb0c5dfc6430a3aba92ca4a0a6babc273a" Dec 13 03:41:16 crc kubenswrapper[5070]: I1213 03:41:16.339540 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-t7hrk" Dec 13 03:41:16 crc kubenswrapper[5070]: I1213 03:41:16.405691 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-6xnt9"] Dec 13 03:41:16 crc kubenswrapper[5070]: E1213 03:41:16.406126 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f3044217-1b50-4e01-8b17-cdb64d3a850e" containerName="ssh-known-hosts-edpm-deployment" Dec 13 03:41:16 crc kubenswrapper[5070]: I1213 03:41:16.406145 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="f3044217-1b50-4e01-8b17-cdb64d3a850e" containerName="ssh-known-hosts-edpm-deployment" Dec 13 03:41:16 crc kubenswrapper[5070]: I1213 03:41:16.406319 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="f3044217-1b50-4e01-8b17-cdb64d3a850e" containerName="ssh-known-hosts-edpm-deployment" Dec 13 03:41:16 crc kubenswrapper[5070]: I1213 03:41:16.406921 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-6xnt9" Dec 13 03:41:16 crc kubenswrapper[5070]: I1213 03:41:16.409847 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 13 03:41:16 crc kubenswrapper[5070]: I1213 03:41:16.409950 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 13 03:41:16 crc kubenswrapper[5070]: I1213 03:41:16.410079 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 13 03:41:16 crc kubenswrapper[5070]: I1213 03:41:16.410192 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-gdkcc" Dec 13 03:41:16 crc kubenswrapper[5070]: I1213 03:41:16.421254 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-6xnt9"] Dec 13 03:41:16 crc kubenswrapper[5070]: I1213 03:41:16.444865 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ab3ac857-35ce-4137-929a-5f1162c4ca8a-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-6xnt9\" (UID: \"ab3ac857-35ce-4137-929a-5f1162c4ca8a\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-6xnt9" Dec 13 03:41:16 crc kubenswrapper[5070]: I1213 03:41:16.445028 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ab3ac857-35ce-4137-929a-5f1162c4ca8a-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-6xnt9\" (UID: \"ab3ac857-35ce-4137-929a-5f1162c4ca8a\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-6xnt9" Dec 13 03:41:16 crc kubenswrapper[5070]: I1213 03:41:16.445074 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-888z5\" (UniqueName: \"kubernetes.io/projected/ab3ac857-35ce-4137-929a-5f1162c4ca8a-kube-api-access-888z5\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-6xnt9\" (UID: \"ab3ac857-35ce-4137-929a-5f1162c4ca8a\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-6xnt9" Dec 13 03:41:16 crc kubenswrapper[5070]: I1213 03:41:16.547489 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ab3ac857-35ce-4137-929a-5f1162c4ca8a-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-6xnt9\" (UID: \"ab3ac857-35ce-4137-929a-5f1162c4ca8a\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-6xnt9" Dec 13 03:41:16 crc kubenswrapper[5070]: I1213 03:41:16.548774 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-888z5\" (UniqueName: \"kubernetes.io/projected/ab3ac857-35ce-4137-929a-5f1162c4ca8a-kube-api-access-888z5\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-6xnt9\" (UID: \"ab3ac857-35ce-4137-929a-5f1162c4ca8a\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-6xnt9" Dec 13 03:41:16 crc kubenswrapper[5070]: I1213 03:41:16.548982 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ab3ac857-35ce-4137-929a-5f1162c4ca8a-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-6xnt9\" (UID: \"ab3ac857-35ce-4137-929a-5f1162c4ca8a\") " 
pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-6xnt9" Dec 13 03:41:16 crc kubenswrapper[5070]: I1213 03:41:16.552969 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ab3ac857-35ce-4137-929a-5f1162c4ca8a-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-6xnt9\" (UID: \"ab3ac857-35ce-4137-929a-5f1162c4ca8a\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-6xnt9" Dec 13 03:41:16 crc kubenswrapper[5070]: I1213 03:41:16.553048 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ab3ac857-35ce-4137-929a-5f1162c4ca8a-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-6xnt9\" (UID: \"ab3ac857-35ce-4137-929a-5f1162c4ca8a\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-6xnt9" Dec 13 03:41:16 crc kubenswrapper[5070]: I1213 03:41:16.564549 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-888z5\" (UniqueName: \"kubernetes.io/projected/ab3ac857-35ce-4137-929a-5f1162c4ca8a-kube-api-access-888z5\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-6xnt9\" (UID: \"ab3ac857-35ce-4137-929a-5f1162c4ca8a\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-6xnt9" Dec 13 03:41:16 crc kubenswrapper[5070]: I1213 03:41:16.747687 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-6xnt9" Dec 13 03:41:17 crc kubenswrapper[5070]: I1213 03:41:17.289414 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-6xnt9"] Dec 13 03:41:17 crc kubenswrapper[5070]: I1213 03:41:17.351363 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-6xnt9" event={"ID":"ab3ac857-35ce-4137-929a-5f1162c4ca8a","Type":"ContainerStarted","Data":"70f079690fb49101c6e70066aaaea52e2a36dee32eed1c4c952702825e0d5d63"} Dec 13 03:41:18 crc kubenswrapper[5070]: I1213 03:41:18.361909 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-6xnt9" event={"ID":"ab3ac857-35ce-4137-929a-5f1162c4ca8a","Type":"ContainerStarted","Data":"9ff69f5be9484c05bc7111b01625c104c2763c33f7fad88bee56816b490fcfe4"} Dec 13 03:41:18 crc kubenswrapper[5070]: I1213 03:41:18.385849 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-6xnt9" podStartSLOduration=1.917565121 podStartE2EDuration="2.385832481s" podCreationTimestamp="2025-12-13 03:41:16 +0000 UTC" firstStartedPulling="2025-12-13 03:41:17.305631758 +0000 UTC m=+1769.541475304" lastFinishedPulling="2025-12-13 03:41:17.773899078 +0000 UTC m=+1770.009742664" observedRunningTime="2025-12-13 03:41:18.37700507 +0000 UTC m=+1770.612848636" watchObservedRunningTime="2025-12-13 03:41:18.385832481 +0000 UTC m=+1770.621676027" Dec 13 03:41:24 crc kubenswrapper[5070]: I1213 03:41:24.167751 5070 scope.go:117] "RemoveContainer" containerID="868a59edbd4e377cf977ca832f6b9371f5846d986a6bbcacfbabf30c647e78a8" Dec 13 03:41:24 crc kubenswrapper[5070]: E1213 03:41:24.168465 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 03:41:24 crc kubenswrapper[5070]: I1213 03:41:24.579163 5070 scope.go:117] "RemoveContainer" containerID="fab62b0e7c57d7e1c178aa10adeed51a72cfe52cdbbc824e57321a3b7b693f33" Dec 13 03:41:24 crc kubenswrapper[5070]: I1213 03:41:24.637580 5070 scope.go:117] "RemoveContainer" containerID="e673b980a21e64ee48353347317ea3c360dcd636d75218beb85a42229cabdc03" Dec 13 03:41:24 crc kubenswrapper[5070]: I1213 03:41:24.664864 5070 scope.go:117] "RemoveContainer" containerID="5858f0b9a770bd713635250a15e2ac4cd7c62f33e94600bc9a0dca82f946c0f6" Dec 13 03:41:24 crc kubenswrapper[5070]: I1213 03:41:24.705419 5070 scope.go:117] "RemoveContainer" containerID="cdc3c34597e656a1c9846b041a60e66893cbca6229fcec6c52c214665c0cb39a" Dec 13 03:41:24 crc kubenswrapper[5070]: I1213 03:41:24.745881 5070 scope.go:117] "RemoveContainer" containerID="4f20ee785171c7c656f65f747eb262791a9a0023f2a95864f119504c654dd5f8" Dec 13 03:41:24 crc kubenswrapper[5070]: I1213 03:41:24.816635 5070 scope.go:117] "RemoveContainer" containerID="9f6fe4c0efc92637e10e236487fa1ece566a9be43bc770441d7e4b7e61a38094" Dec 13 03:41:24 crc kubenswrapper[5070]: I1213 03:41:24.843254 5070 scope.go:117] "RemoveContainer" containerID="63906721537da9234e81e89109061e80a164247eefe2b755473c3d94f88ea6a2" Dec 13 03:41:24 crc kubenswrapper[5070]: I1213 03:41:24.864260 5070 scope.go:117] "RemoveContainer" containerID="a96fc39999f9ae75d2f09170119f6a6de057b1e4f7cb2c35eb09c60904ad85e0" Dec 13 03:41:24 crc kubenswrapper[5070]: I1213 03:41:24.882946 5070 scope.go:117] "RemoveContainer" containerID="7a29c9bb0683c47b91b2cb9bf3a68c1aa41a48307bb8349ec07d65d45a4594cb" Dec 13 03:41:26 crc kubenswrapper[5070]: I1213 03:41:26.425542 5070 generic.go:334] "Generic (PLEG): container finished" podID="ab3ac857-35ce-4137-929a-5f1162c4ca8a" containerID="9ff69f5be9484c05bc7111b01625c104c2763c33f7fad88bee56816b490fcfe4" exitCode=0 Dec 13 03:41:26 crc kubenswrapper[5070]: I1213 03:41:26.425602 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-6xnt9" event={"ID":"ab3ac857-35ce-4137-929a-5f1162c4ca8a","Type":"ContainerDied","Data":"9ff69f5be9484c05bc7111b01625c104c2763c33f7fad88bee56816b490fcfe4"} Dec 13 03:41:27 crc kubenswrapper[5070]: I1213 03:41:27.818177 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-6xnt9" Dec 13 03:41:27 crc kubenswrapper[5070]: I1213 03:41:27.963076 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-888z5\" (UniqueName: \"kubernetes.io/projected/ab3ac857-35ce-4137-929a-5f1162c4ca8a-kube-api-access-888z5\") pod \"ab3ac857-35ce-4137-929a-5f1162c4ca8a\" (UID: \"ab3ac857-35ce-4137-929a-5f1162c4ca8a\") " Dec 13 03:41:27 crc kubenswrapper[5070]: I1213 03:41:27.963533 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ab3ac857-35ce-4137-929a-5f1162c4ca8a-ssh-key\") pod \"ab3ac857-35ce-4137-929a-5f1162c4ca8a\" (UID: \"ab3ac857-35ce-4137-929a-5f1162c4ca8a\") " Dec 13 03:41:27 crc kubenswrapper[5070]: I1213 03:41:27.963590 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ab3ac857-35ce-4137-929a-5f1162c4ca8a-inventory\") pod \"ab3ac857-35ce-4137-929a-5f1162c4ca8a\" (UID: \"ab3ac857-35ce-4137-929a-5f1162c4ca8a\") " Dec 13 03:41:27 crc kubenswrapper[5070]: I1213 03:41:27.968832 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ab3ac857-35ce-4137-929a-5f1162c4ca8a-kube-api-access-888z5" (OuterVolumeSpecName: "kube-api-access-888z5") pod "ab3ac857-35ce-4137-929a-5f1162c4ca8a" (UID: "ab3ac857-35ce-4137-929a-5f1162c4ca8a"). InnerVolumeSpecName "kube-api-access-888z5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:41:27 crc kubenswrapper[5070]: I1213 03:41:27.989943 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ab3ac857-35ce-4137-929a-5f1162c4ca8a-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "ab3ac857-35ce-4137-929a-5f1162c4ca8a" (UID: "ab3ac857-35ce-4137-929a-5f1162c4ca8a"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:41:27 crc kubenswrapper[5070]: I1213 03:41:27.993319 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ab3ac857-35ce-4137-929a-5f1162c4ca8a-inventory" (OuterVolumeSpecName: "inventory") pod "ab3ac857-35ce-4137-929a-5f1162c4ca8a" (UID: "ab3ac857-35ce-4137-929a-5f1162c4ca8a"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:41:28 crc kubenswrapper[5070]: I1213 03:41:28.066130 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-888z5\" (UniqueName: \"kubernetes.io/projected/ab3ac857-35ce-4137-929a-5f1162c4ca8a-kube-api-access-888z5\") on node \"crc\" DevicePath \"\"" Dec 13 03:41:28 crc kubenswrapper[5070]: I1213 03:41:28.066167 5070 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ab3ac857-35ce-4137-929a-5f1162c4ca8a-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 13 03:41:28 crc kubenswrapper[5070]: I1213 03:41:28.066179 5070 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ab3ac857-35ce-4137-929a-5f1162c4ca8a-inventory\") on node \"crc\" DevicePath \"\"" Dec 13 03:41:28 crc kubenswrapper[5070]: I1213 03:41:28.444034 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-6xnt9" event={"ID":"ab3ac857-35ce-4137-929a-5f1162c4ca8a","Type":"ContainerDied","Data":"70f079690fb49101c6e70066aaaea52e2a36dee32eed1c4c952702825e0d5d63"} Dec 13 03:41:28 crc kubenswrapper[5070]: I1213 03:41:28.444095 5070 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="70f079690fb49101c6e70066aaaea52e2a36dee32eed1c4c952702825e0d5d63" Dec 13 03:41:28 crc kubenswrapper[5070]: I1213 03:41:28.444140 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-6xnt9" Dec 13 03:41:28 crc kubenswrapper[5070]: I1213 03:41:28.519622 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-jpr2m"] Dec 13 03:41:28 crc kubenswrapper[5070]: E1213 03:41:28.520102 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab3ac857-35ce-4137-929a-5f1162c4ca8a" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Dec 13 03:41:28 crc kubenswrapper[5070]: I1213 03:41:28.520121 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab3ac857-35ce-4137-929a-5f1162c4ca8a" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Dec 13 03:41:28 crc kubenswrapper[5070]: I1213 03:41:28.520296 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="ab3ac857-35ce-4137-929a-5f1162c4ca8a" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Dec 13 03:41:28 crc kubenswrapper[5070]: I1213 03:41:28.520912 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-jpr2m" Dec 13 03:41:28 crc kubenswrapper[5070]: I1213 03:41:28.523890 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-gdkcc" Dec 13 03:41:28 crc kubenswrapper[5070]: I1213 03:41:28.523921 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 13 03:41:28 crc kubenswrapper[5070]: I1213 03:41:28.524365 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 13 03:41:28 crc kubenswrapper[5070]: I1213 03:41:28.525216 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 13 03:41:28 crc kubenswrapper[5070]: I1213 03:41:28.533588 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-jpr2m"] Dec 13 03:41:28 crc kubenswrapper[5070]: I1213 03:41:28.676828 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qqxfs\" (UniqueName: \"kubernetes.io/projected/2f82187b-9034-41c1-b08c-a18a14623fdb-kube-api-access-qqxfs\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-jpr2m\" (UID: \"2f82187b-9034-41c1-b08c-a18a14623fdb\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-jpr2m" Dec 13 03:41:28 crc kubenswrapper[5070]: I1213 03:41:28.677095 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2f82187b-9034-41c1-b08c-a18a14623fdb-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-jpr2m\" (UID: \"2f82187b-9034-41c1-b08c-a18a14623fdb\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-jpr2m" Dec 13 03:41:28 crc kubenswrapper[5070]: I1213 03:41:28.677186 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2f82187b-9034-41c1-b08c-a18a14623fdb-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-jpr2m\" (UID: \"2f82187b-9034-41c1-b08c-a18a14623fdb\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-jpr2m" Dec 13 03:41:28 crc kubenswrapper[5070]: I1213 03:41:28.778909 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qqxfs\" (UniqueName: \"kubernetes.io/projected/2f82187b-9034-41c1-b08c-a18a14623fdb-kube-api-access-qqxfs\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-jpr2m\" (UID: \"2f82187b-9034-41c1-b08c-a18a14623fdb\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-jpr2m" Dec 13 03:41:28 crc kubenswrapper[5070]: I1213 03:41:28.779101 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2f82187b-9034-41c1-b08c-a18a14623fdb-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-jpr2m\" (UID: \"2f82187b-9034-41c1-b08c-a18a14623fdb\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-jpr2m" Dec 13 03:41:28 crc kubenswrapper[5070]: I1213 03:41:28.779153 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2f82187b-9034-41c1-b08c-a18a14623fdb-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-jpr2m\" (UID: 
\"2f82187b-9034-41c1-b08c-a18a14623fdb\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-jpr2m" Dec 13 03:41:28 crc kubenswrapper[5070]: I1213 03:41:28.783107 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2f82187b-9034-41c1-b08c-a18a14623fdb-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-jpr2m\" (UID: \"2f82187b-9034-41c1-b08c-a18a14623fdb\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-jpr2m" Dec 13 03:41:28 crc kubenswrapper[5070]: I1213 03:41:28.784202 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2f82187b-9034-41c1-b08c-a18a14623fdb-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-jpr2m\" (UID: \"2f82187b-9034-41c1-b08c-a18a14623fdb\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-jpr2m" Dec 13 03:41:28 crc kubenswrapper[5070]: I1213 03:41:28.796402 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qqxfs\" (UniqueName: \"kubernetes.io/projected/2f82187b-9034-41c1-b08c-a18a14623fdb-kube-api-access-qqxfs\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-jpr2m\" (UID: \"2f82187b-9034-41c1-b08c-a18a14623fdb\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-jpr2m" Dec 13 03:41:28 crc kubenswrapper[5070]: I1213 03:41:28.839816 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-jpr2m" Dec 13 03:41:29 crc kubenswrapper[5070]: I1213 03:41:29.051106 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-l8q4l"] Dec 13 03:41:29 crc kubenswrapper[5070]: I1213 03:41:29.061220 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-l8q4l"] Dec 13 03:41:29 crc kubenswrapper[5070]: I1213 03:41:29.153061 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-jpr2m"] Dec 13 03:41:29 crc kubenswrapper[5070]: I1213 03:41:29.462723 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-jpr2m" event={"ID":"2f82187b-9034-41c1-b08c-a18a14623fdb","Type":"ContainerStarted","Data":"db391bb385581de3ade79e3e55277f60865fb72106db743768ba870c1eea5db9"} Dec 13 03:41:30 crc kubenswrapper[5070]: I1213 03:41:30.184926 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7d28fde7-7ce2-4aa6-9412-dae4f27cb494" path="/var/lib/kubelet/pods/7d28fde7-7ce2-4aa6-9412-dae4f27cb494/volumes" Dec 13 03:41:30 crc kubenswrapper[5070]: I1213 03:41:30.472128 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-jpr2m" event={"ID":"2f82187b-9034-41c1-b08c-a18a14623fdb","Type":"ContainerStarted","Data":"70b3dde64458f0a590eebc57cf29df0d3f41e03d2f5f4b406630c9487dd866b2"} Dec 13 03:41:30 crc kubenswrapper[5070]: I1213 03:41:30.487699 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-jpr2m" podStartSLOduration=1.942025841 podStartE2EDuration="2.487681805s" podCreationTimestamp="2025-12-13 03:41:28 +0000 UTC" firstStartedPulling="2025-12-13 03:41:29.156516311 +0000 UTC m=+1781.392359857" lastFinishedPulling="2025-12-13 03:41:29.702172275 +0000 UTC m=+1781.938015821" observedRunningTime="2025-12-13 
03:41:30.486714697 +0000 UTC m=+1782.722558243" watchObservedRunningTime="2025-12-13 03:41:30.487681805 +0000 UTC m=+1782.723525351" Dec 13 03:41:38 crc kubenswrapper[5070]: I1213 03:41:38.172183 5070 scope.go:117] "RemoveContainer" containerID="868a59edbd4e377cf977ca832f6b9371f5846d986a6bbcacfbabf30c647e78a8" Dec 13 03:41:38 crc kubenswrapper[5070]: E1213 03:41:38.172980 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 03:41:39 crc kubenswrapper[5070]: I1213 03:41:39.564778 5070 generic.go:334] "Generic (PLEG): container finished" podID="2f82187b-9034-41c1-b08c-a18a14623fdb" containerID="70b3dde64458f0a590eebc57cf29df0d3f41e03d2f5f4b406630c9487dd866b2" exitCode=0 Dec 13 03:41:39 crc kubenswrapper[5070]: I1213 03:41:39.564810 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-jpr2m" event={"ID":"2f82187b-9034-41c1-b08c-a18a14623fdb","Type":"ContainerDied","Data":"70b3dde64458f0a590eebc57cf29df0d3f41e03d2f5f4b406630c9487dd866b2"} Dec 13 03:41:41 crc kubenswrapper[5070]: I1213 03:41:41.015174 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-jpr2m" Dec 13 03:41:41 crc kubenswrapper[5070]: I1213 03:41:41.119894 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2f82187b-9034-41c1-b08c-a18a14623fdb-inventory\") pod \"2f82187b-9034-41c1-b08c-a18a14623fdb\" (UID: \"2f82187b-9034-41c1-b08c-a18a14623fdb\") " Dec 13 03:41:41 crc kubenswrapper[5070]: I1213 03:41:41.120075 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qqxfs\" (UniqueName: \"kubernetes.io/projected/2f82187b-9034-41c1-b08c-a18a14623fdb-kube-api-access-qqxfs\") pod \"2f82187b-9034-41c1-b08c-a18a14623fdb\" (UID: \"2f82187b-9034-41c1-b08c-a18a14623fdb\") " Dec 13 03:41:41 crc kubenswrapper[5070]: I1213 03:41:41.120128 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2f82187b-9034-41c1-b08c-a18a14623fdb-ssh-key\") pod \"2f82187b-9034-41c1-b08c-a18a14623fdb\" (UID: \"2f82187b-9034-41c1-b08c-a18a14623fdb\") " Dec 13 03:41:41 crc kubenswrapper[5070]: I1213 03:41:41.126710 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2f82187b-9034-41c1-b08c-a18a14623fdb-kube-api-access-qqxfs" (OuterVolumeSpecName: "kube-api-access-qqxfs") pod "2f82187b-9034-41c1-b08c-a18a14623fdb" (UID: "2f82187b-9034-41c1-b08c-a18a14623fdb"). InnerVolumeSpecName "kube-api-access-qqxfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:41:41 crc kubenswrapper[5070]: I1213 03:41:41.149508 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2f82187b-9034-41c1-b08c-a18a14623fdb-inventory" (OuterVolumeSpecName: "inventory") pod "2f82187b-9034-41c1-b08c-a18a14623fdb" (UID: "2f82187b-9034-41c1-b08c-a18a14623fdb"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:41:41 crc kubenswrapper[5070]: I1213 03:41:41.155358 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2f82187b-9034-41c1-b08c-a18a14623fdb-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "2f82187b-9034-41c1-b08c-a18a14623fdb" (UID: "2f82187b-9034-41c1-b08c-a18a14623fdb"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:41:41 crc kubenswrapper[5070]: I1213 03:41:41.222770 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qqxfs\" (UniqueName: \"kubernetes.io/projected/2f82187b-9034-41c1-b08c-a18a14623fdb-kube-api-access-qqxfs\") on node \"crc\" DevicePath \"\"" Dec 13 03:41:41 crc kubenswrapper[5070]: I1213 03:41:41.222815 5070 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2f82187b-9034-41c1-b08c-a18a14623fdb-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 13 03:41:41 crc kubenswrapper[5070]: I1213 03:41:41.222825 5070 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2f82187b-9034-41c1-b08c-a18a14623fdb-inventory\") on node \"crc\" DevicePath \"\"" Dec 13 03:41:41 crc kubenswrapper[5070]: I1213 03:41:41.585872 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-jpr2m" event={"ID":"2f82187b-9034-41c1-b08c-a18a14623fdb","Type":"ContainerDied","Data":"db391bb385581de3ade79e3e55277f60865fb72106db743768ba870c1eea5db9"} Dec 13 03:41:41 crc kubenswrapper[5070]: I1213 03:41:41.585923 5070 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="db391bb385581de3ade79e3e55277f60865fb72106db743768ba870c1eea5db9" Dec 13 03:41:41 crc kubenswrapper[5070]: I1213 03:41:41.585980 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-jpr2m" Dec 13 03:41:51 crc kubenswrapper[5070]: I1213 03:41:51.166998 5070 scope.go:117] "RemoveContainer" containerID="868a59edbd4e377cf977ca832f6b9371f5846d986a6bbcacfbabf30c647e78a8" Dec 13 03:41:51 crc kubenswrapper[5070]: E1213 03:41:51.167655 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 03:41:52 crc kubenswrapper[5070]: I1213 03:41:52.056563 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-cell-mapping-x5tqc"] Dec 13 03:41:52 crc kubenswrapper[5070]: I1213 03:41:52.063703 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-cell-mapping-x5tqc"] Dec 13 03:41:52 crc kubenswrapper[5070]: I1213 03:41:52.184222 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="21e9c870-f7e7-4ddf-abbb-56911912f97f" path="/var/lib/kubelet/pods/21e9c870-f7e7-4ddf-abbb-56911912f97f/volumes" Dec 13 03:41:55 crc kubenswrapper[5070]: I1213 03:41:55.041574 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-6ltlj"] Dec 13 03:41:55 crc kubenswrapper[5070]: I1213 03:41:55.048971 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-6ltlj"] Dec 13 03:41:56 crc kubenswrapper[5070]: I1213 03:41:56.179781 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="19a2a29e-7784-4e4f-99bc-dad38cff1b50" path="/var/lib/kubelet/pods/19a2a29e-7784-4e4f-99bc-dad38cff1b50/volumes" Dec 13 03:42:03 crc kubenswrapper[5070]: I1213 03:42:03.167537 5070 scope.go:117] "RemoveContainer" containerID="868a59edbd4e377cf977ca832f6b9371f5846d986a6bbcacfbabf30c647e78a8" Dec 13 03:42:03 crc kubenswrapper[5070]: E1213 03:42:03.168986 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 03:42:16 crc kubenswrapper[5070]: I1213 03:42:16.167350 5070 scope.go:117] "RemoveContainer" containerID="868a59edbd4e377cf977ca832f6b9371f5846d986a6bbcacfbabf30c647e78a8" Dec 13 03:42:16 crc kubenswrapper[5070]: E1213 03:42:16.168107 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 03:42:25 crc kubenswrapper[5070]: I1213 03:42:25.063468 5070 scope.go:117] "RemoveContainer" containerID="0cd6fd61fefd84c776775418027b78e5fb60f51884e89d6db56e9c5b3723b879" Dec 13 03:42:25 crc kubenswrapper[5070]: I1213 03:42:25.116107 5070 scope.go:117] 
"RemoveContainer" containerID="d6d64e5d266d635a927f5bef7b322688f4a55e133f22f490d3dd1976ed4c5735" Dec 13 03:42:25 crc kubenswrapper[5070]: I1213 03:42:25.162797 5070 scope.go:117] "RemoveContainer" containerID="3e361442e6367decca6e454c4bfd5e4caa44d7ea8f7e3176ff123cb097250947" Dec 13 03:42:29 crc kubenswrapper[5070]: I1213 03:42:29.167483 5070 scope.go:117] "RemoveContainer" containerID="868a59edbd4e377cf977ca832f6b9371f5846d986a6bbcacfbabf30c647e78a8" Dec 13 03:42:29 crc kubenswrapper[5070]: E1213 03:42:29.167889 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 03:42:38 crc kubenswrapper[5070]: I1213 03:42:38.051211 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-cell-mapping-n59t8"] Dec 13 03:42:38 crc kubenswrapper[5070]: I1213 03:42:38.063043 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-cell-mapping-n59t8"] Dec 13 03:42:38 crc kubenswrapper[5070]: I1213 03:42:38.176066 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a485ad64-a9af-43f3-863a-0ee52612df72" path="/var/lib/kubelet/pods/a485ad64-a9af-43f3-863a-0ee52612df72/volumes" Dec 13 03:42:40 crc kubenswrapper[5070]: I1213 03:42:40.167737 5070 scope.go:117] "RemoveContainer" containerID="868a59edbd4e377cf977ca832f6b9371f5846d986a6bbcacfbabf30c647e78a8" Dec 13 03:42:40 crc kubenswrapper[5070]: E1213 03:42:40.168592 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 03:42:54 crc kubenswrapper[5070]: I1213 03:42:54.166552 5070 scope.go:117] "RemoveContainer" containerID="868a59edbd4e377cf977ca832f6b9371f5846d986a6bbcacfbabf30c647e78a8" Dec 13 03:42:54 crc kubenswrapper[5070]: E1213 03:42:54.167294 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 03:42:56 crc kubenswrapper[5070]: I1213 03:42:56.823567 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-kzxts"] Dec 13 03:42:56 crc kubenswrapper[5070]: E1213 03:42:56.824238 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2f82187b-9034-41c1-b08c-a18a14623fdb" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Dec 13 03:42:56 crc kubenswrapper[5070]: I1213 03:42:56.824255 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f82187b-9034-41c1-b08c-a18a14623fdb" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Dec 13 03:42:56 crc kubenswrapper[5070]: I1213 
03:42:56.824495 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="2f82187b-9034-41c1-b08c-a18a14623fdb" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Dec 13 03:42:56 crc kubenswrapper[5070]: I1213 03:42:56.825856 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-kzxts" Dec 13 03:42:56 crc kubenswrapper[5070]: I1213 03:42:56.833074 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-kzxts"] Dec 13 03:42:56 crc kubenswrapper[5070]: I1213 03:42:56.892248 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b48f7\" (UniqueName: \"kubernetes.io/projected/5583bfce-70ea-42f9-ac42-2c871e93fcdf-kube-api-access-b48f7\") pod \"redhat-operators-kzxts\" (UID: \"5583bfce-70ea-42f9-ac42-2c871e93fcdf\") " pod="openshift-marketplace/redhat-operators-kzxts" Dec 13 03:42:56 crc kubenswrapper[5070]: I1213 03:42:56.892311 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5583bfce-70ea-42f9-ac42-2c871e93fcdf-utilities\") pod \"redhat-operators-kzxts\" (UID: \"5583bfce-70ea-42f9-ac42-2c871e93fcdf\") " pod="openshift-marketplace/redhat-operators-kzxts" Dec 13 03:42:56 crc kubenswrapper[5070]: I1213 03:42:56.892351 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5583bfce-70ea-42f9-ac42-2c871e93fcdf-catalog-content\") pod \"redhat-operators-kzxts\" (UID: \"5583bfce-70ea-42f9-ac42-2c871e93fcdf\") " pod="openshift-marketplace/redhat-operators-kzxts" Dec 13 03:42:56 crc kubenswrapper[5070]: I1213 03:42:56.994377 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b48f7\" (UniqueName: \"kubernetes.io/projected/5583bfce-70ea-42f9-ac42-2c871e93fcdf-kube-api-access-b48f7\") pod \"redhat-operators-kzxts\" (UID: \"5583bfce-70ea-42f9-ac42-2c871e93fcdf\") " pod="openshift-marketplace/redhat-operators-kzxts" Dec 13 03:42:56 crc kubenswrapper[5070]: I1213 03:42:56.994463 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5583bfce-70ea-42f9-ac42-2c871e93fcdf-utilities\") pod \"redhat-operators-kzxts\" (UID: \"5583bfce-70ea-42f9-ac42-2c871e93fcdf\") " pod="openshift-marketplace/redhat-operators-kzxts" Dec 13 03:42:56 crc kubenswrapper[5070]: I1213 03:42:56.994507 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5583bfce-70ea-42f9-ac42-2c871e93fcdf-catalog-content\") pod \"redhat-operators-kzxts\" (UID: \"5583bfce-70ea-42f9-ac42-2c871e93fcdf\") " pod="openshift-marketplace/redhat-operators-kzxts" Dec 13 03:42:56 crc kubenswrapper[5070]: I1213 03:42:56.995048 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5583bfce-70ea-42f9-ac42-2c871e93fcdf-catalog-content\") pod \"redhat-operators-kzxts\" (UID: \"5583bfce-70ea-42f9-ac42-2c871e93fcdf\") " pod="openshift-marketplace/redhat-operators-kzxts" Dec 13 03:42:56 crc kubenswrapper[5070]: I1213 03:42:56.995280 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/5583bfce-70ea-42f9-ac42-2c871e93fcdf-utilities\") pod \"redhat-operators-kzxts\" (UID: \"5583bfce-70ea-42f9-ac42-2c871e93fcdf\") " pod="openshift-marketplace/redhat-operators-kzxts" Dec 13 03:42:57 crc kubenswrapper[5070]: I1213 03:42:57.020905 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b48f7\" (UniqueName: \"kubernetes.io/projected/5583bfce-70ea-42f9-ac42-2c871e93fcdf-kube-api-access-b48f7\") pod \"redhat-operators-kzxts\" (UID: \"5583bfce-70ea-42f9-ac42-2c871e93fcdf\") " pod="openshift-marketplace/redhat-operators-kzxts" Dec 13 03:42:57 crc kubenswrapper[5070]: I1213 03:42:57.175869 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-kzxts" Dec 13 03:42:57 crc kubenswrapper[5070]: I1213 03:42:57.523337 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-kzxts"] Dec 13 03:42:58 crc kubenswrapper[5070]: I1213 03:42:58.402051 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kzxts" event={"ID":"5583bfce-70ea-42f9-ac42-2c871e93fcdf","Type":"ContainerStarted","Data":"0f7cd72830817e040d52d3687ba7a88a0b38b48ff5870c09e8bc4d15452cd367"} Dec 13 03:42:58 crc kubenswrapper[5070]: I1213 03:42:58.402264 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kzxts" event={"ID":"5583bfce-70ea-42f9-ac42-2c871e93fcdf","Type":"ContainerStarted","Data":"5826eab0f06869982a0baee15577ba9bd55025b45eabfaad1e2da3a6ae5a15a6"} Dec 13 03:42:59 crc kubenswrapper[5070]: I1213 03:42:59.411775 5070 generic.go:334] "Generic (PLEG): container finished" podID="5583bfce-70ea-42f9-ac42-2c871e93fcdf" containerID="0f7cd72830817e040d52d3687ba7a88a0b38b48ff5870c09e8bc4d15452cd367" exitCode=0 Dec 13 03:42:59 crc kubenswrapper[5070]: I1213 03:42:59.411889 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kzxts" event={"ID":"5583bfce-70ea-42f9-ac42-2c871e93fcdf","Type":"ContainerDied","Data":"0f7cd72830817e040d52d3687ba7a88a0b38b48ff5870c09e8bc4d15452cd367"} Dec 13 03:42:59 crc kubenswrapper[5070]: I1213 03:42:59.414264 5070 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 13 03:43:02 crc kubenswrapper[5070]: I1213 03:43:02.441681 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kzxts" event={"ID":"5583bfce-70ea-42f9-ac42-2c871e93fcdf","Type":"ContainerStarted","Data":"b66a78a3a43bee42192992bf0a0d6271e2863a9796307579cd4528b73b4a28d3"} Dec 13 03:43:05 crc kubenswrapper[5070]: I1213 03:43:05.166878 5070 scope.go:117] "RemoveContainer" containerID="868a59edbd4e377cf977ca832f6b9371f5846d986a6bbcacfbabf30c647e78a8" Dec 13 03:43:05 crc kubenswrapper[5070]: E1213 03:43:05.167360 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 03:43:05 crc kubenswrapper[5070]: I1213 03:43:05.472368 5070 generic.go:334] "Generic (PLEG): container finished" podID="5583bfce-70ea-42f9-ac42-2c871e93fcdf" 
containerID="b66a78a3a43bee42192992bf0a0d6271e2863a9796307579cd4528b73b4a28d3" exitCode=0 Dec 13 03:43:05 crc kubenswrapper[5070]: I1213 03:43:05.472503 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kzxts" event={"ID":"5583bfce-70ea-42f9-ac42-2c871e93fcdf","Type":"ContainerDied","Data":"b66a78a3a43bee42192992bf0a0d6271e2863a9796307579cd4528b73b4a28d3"} Dec 13 03:43:06 crc kubenswrapper[5070]: I1213 03:43:06.484262 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kzxts" event={"ID":"5583bfce-70ea-42f9-ac42-2c871e93fcdf","Type":"ContainerStarted","Data":"a5f6ef9ab7beb4658dc12a40b8902ae02ac3bd2cc3cadfbf1e7eea473507e774"} Dec 13 03:43:06 crc kubenswrapper[5070]: I1213 03:43:06.506970 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-kzxts" podStartSLOduration=3.949736111 podStartE2EDuration="10.50695075s" podCreationTimestamp="2025-12-13 03:42:56 +0000 UTC" firstStartedPulling="2025-12-13 03:42:59.41404501 +0000 UTC m=+1871.649888556" lastFinishedPulling="2025-12-13 03:43:05.971259609 +0000 UTC m=+1878.207103195" observedRunningTime="2025-12-13 03:43:06.500050552 +0000 UTC m=+1878.735894098" watchObservedRunningTime="2025-12-13 03:43:06.50695075 +0000 UTC m=+1878.742794296" Dec 13 03:43:07 crc kubenswrapper[5070]: I1213 03:43:07.176172 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-kzxts" Dec 13 03:43:07 crc kubenswrapper[5070]: I1213 03:43:07.176224 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-kzxts" Dec 13 03:43:08 crc kubenswrapper[5070]: I1213 03:43:08.228831 5070 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-kzxts" podUID="5583bfce-70ea-42f9-ac42-2c871e93fcdf" containerName="registry-server" probeResult="failure" output=< Dec 13 03:43:08 crc kubenswrapper[5070]: timeout: failed to connect service ":50051" within 1s Dec 13 03:43:08 crc kubenswrapper[5070]: > Dec 13 03:43:17 crc kubenswrapper[5070]: I1213 03:43:17.264698 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-kzxts" Dec 13 03:43:17 crc kubenswrapper[5070]: I1213 03:43:17.331318 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-kzxts" Dec 13 03:43:17 crc kubenswrapper[5070]: I1213 03:43:17.503657 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-kzxts"] Dec 13 03:43:18 crc kubenswrapper[5070]: I1213 03:43:18.592462 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-kzxts" podUID="5583bfce-70ea-42f9-ac42-2c871e93fcdf" containerName="registry-server" containerID="cri-o://a5f6ef9ab7beb4658dc12a40b8902ae02ac3bd2cc3cadfbf1e7eea473507e774" gracePeriod=2 Dec 13 03:43:18 crc kubenswrapper[5070]: E1213 03:43:18.874932 5070 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5583bfce_70ea_42f9_ac42_2c871e93fcdf.slice/crio-a5f6ef9ab7beb4658dc12a40b8902ae02ac3bd2cc3cadfbf1e7eea473507e774.scope\": RecentStats: unable to find data in memory cache]" Dec 13 03:43:19 crc kubenswrapper[5070]: I1213 03:43:19.603110 5070 
generic.go:334] "Generic (PLEG): container finished" podID="5583bfce-70ea-42f9-ac42-2c871e93fcdf" containerID="a5f6ef9ab7beb4658dc12a40b8902ae02ac3bd2cc3cadfbf1e7eea473507e774" exitCode=0 Dec 13 03:43:19 crc kubenswrapper[5070]: I1213 03:43:19.603163 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kzxts" event={"ID":"5583bfce-70ea-42f9-ac42-2c871e93fcdf","Type":"ContainerDied","Data":"a5f6ef9ab7beb4658dc12a40b8902ae02ac3bd2cc3cadfbf1e7eea473507e774"} Dec 13 03:43:20 crc kubenswrapper[5070]: I1213 03:43:20.167573 5070 scope.go:117] "RemoveContainer" containerID="868a59edbd4e377cf977ca832f6b9371f5846d986a6bbcacfbabf30c647e78a8" Dec 13 03:43:20 crc kubenswrapper[5070]: E1213 03:43:20.168029 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 03:43:21 crc kubenswrapper[5070]: I1213 03:43:21.036648 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-kzxts" Dec 13 03:43:21 crc kubenswrapper[5070]: I1213 03:43:21.194205 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5583bfce-70ea-42f9-ac42-2c871e93fcdf-utilities\") pod \"5583bfce-70ea-42f9-ac42-2c871e93fcdf\" (UID: \"5583bfce-70ea-42f9-ac42-2c871e93fcdf\") " Dec 13 03:43:21 crc kubenswrapper[5070]: I1213 03:43:21.194315 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5583bfce-70ea-42f9-ac42-2c871e93fcdf-catalog-content\") pod \"5583bfce-70ea-42f9-ac42-2c871e93fcdf\" (UID: \"5583bfce-70ea-42f9-ac42-2c871e93fcdf\") " Dec 13 03:43:21 crc kubenswrapper[5070]: I1213 03:43:21.194389 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b48f7\" (UniqueName: \"kubernetes.io/projected/5583bfce-70ea-42f9-ac42-2c871e93fcdf-kube-api-access-b48f7\") pod \"5583bfce-70ea-42f9-ac42-2c871e93fcdf\" (UID: \"5583bfce-70ea-42f9-ac42-2c871e93fcdf\") " Dec 13 03:43:21 crc kubenswrapper[5070]: I1213 03:43:21.195126 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5583bfce-70ea-42f9-ac42-2c871e93fcdf-utilities" (OuterVolumeSpecName: "utilities") pod "5583bfce-70ea-42f9-ac42-2c871e93fcdf" (UID: "5583bfce-70ea-42f9-ac42-2c871e93fcdf"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 03:43:21 crc kubenswrapper[5070]: I1213 03:43:21.200508 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5583bfce-70ea-42f9-ac42-2c871e93fcdf-kube-api-access-b48f7" (OuterVolumeSpecName: "kube-api-access-b48f7") pod "5583bfce-70ea-42f9-ac42-2c871e93fcdf" (UID: "5583bfce-70ea-42f9-ac42-2c871e93fcdf"). InnerVolumeSpecName "kube-api-access-b48f7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:43:21 crc kubenswrapper[5070]: I1213 03:43:21.296922 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b48f7\" (UniqueName: \"kubernetes.io/projected/5583bfce-70ea-42f9-ac42-2c871e93fcdf-kube-api-access-b48f7\") on node \"crc\" DevicePath \"\"" Dec 13 03:43:21 crc kubenswrapper[5070]: I1213 03:43:21.296976 5070 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5583bfce-70ea-42f9-ac42-2c871e93fcdf-utilities\") on node \"crc\" DevicePath \"\"" Dec 13 03:43:21 crc kubenswrapper[5070]: I1213 03:43:21.300276 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5583bfce-70ea-42f9-ac42-2c871e93fcdf-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5583bfce-70ea-42f9-ac42-2c871e93fcdf" (UID: "5583bfce-70ea-42f9-ac42-2c871e93fcdf"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 03:43:21 crc kubenswrapper[5070]: I1213 03:43:21.398866 5070 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5583bfce-70ea-42f9-ac42-2c871e93fcdf-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 13 03:43:21 crc kubenswrapper[5070]: I1213 03:43:21.623048 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kzxts" event={"ID":"5583bfce-70ea-42f9-ac42-2c871e93fcdf","Type":"ContainerDied","Data":"5826eab0f06869982a0baee15577ba9bd55025b45eabfaad1e2da3a6ae5a15a6"} Dec 13 03:43:21 crc kubenswrapper[5070]: I1213 03:43:21.623098 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-kzxts" Dec 13 03:43:21 crc kubenswrapper[5070]: I1213 03:43:21.623128 5070 scope.go:117] "RemoveContainer" containerID="a5f6ef9ab7beb4658dc12a40b8902ae02ac3bd2cc3cadfbf1e7eea473507e774" Dec 13 03:43:21 crc kubenswrapper[5070]: I1213 03:43:21.651901 5070 scope.go:117] "RemoveContainer" containerID="b66a78a3a43bee42192992bf0a0d6271e2863a9796307579cd4528b73b4a28d3" Dec 13 03:43:21 crc kubenswrapper[5070]: I1213 03:43:21.661616 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-kzxts"] Dec 13 03:43:21 crc kubenswrapper[5070]: I1213 03:43:21.669955 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-kzxts"] Dec 13 03:43:21 crc kubenswrapper[5070]: I1213 03:43:21.677520 5070 scope.go:117] "RemoveContainer" containerID="0f7cd72830817e040d52d3687ba7a88a0b38b48ff5870c09e8bc4d15452cd367" Dec 13 03:43:22 crc kubenswrapper[5070]: I1213 03:43:22.180776 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5583bfce-70ea-42f9-ac42-2c871e93fcdf" path="/var/lib/kubelet/pods/5583bfce-70ea-42f9-ac42-2c871e93fcdf/volumes" Dec 13 03:43:25 crc kubenswrapper[5070]: I1213 03:43:25.261394 5070 scope.go:117] "RemoveContainer" containerID="6f644a434838affefee596628949bb8f090c1d7a31bb167fba52cfd8addfe0e9" Dec 13 03:43:33 crc kubenswrapper[5070]: I1213 03:43:33.167555 5070 scope.go:117] "RemoveContainer" containerID="868a59edbd4e377cf977ca832f6b9371f5846d986a6bbcacfbabf30c647e78a8" Dec 13 03:43:33 crc kubenswrapper[5070]: E1213 03:43:33.168378 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 03:43:45 crc kubenswrapper[5070]: I1213 03:43:45.167203 5070 scope.go:117] "RemoveContainer" containerID="868a59edbd4e377cf977ca832f6b9371f5846d986a6bbcacfbabf30c647e78a8" Dec 13 03:43:45 crc kubenswrapper[5070]: E1213 03:43:45.168142 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 03:43:46 crc kubenswrapper[5070]: I1213 03:43:46.264768 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-24z2t"] Dec 13 03:43:46 crc kubenswrapper[5070]: E1213 03:43:46.266997 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5583bfce-70ea-42f9-ac42-2c871e93fcdf" containerName="extract-content" Dec 13 03:43:46 crc kubenswrapper[5070]: I1213 03:43:46.267417 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="5583bfce-70ea-42f9-ac42-2c871e93fcdf" containerName="extract-content" Dec 13 03:43:46 crc kubenswrapper[5070]: E1213 03:43:46.267458 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5583bfce-70ea-42f9-ac42-2c871e93fcdf" containerName="extract-utilities" Dec 13 03:43:46 crc kubenswrapper[5070]: I1213 03:43:46.267472 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="5583bfce-70ea-42f9-ac42-2c871e93fcdf" containerName="extract-utilities" Dec 13 03:43:46 crc kubenswrapper[5070]: E1213 03:43:46.267531 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5583bfce-70ea-42f9-ac42-2c871e93fcdf" containerName="registry-server" Dec 13 03:43:46 crc kubenswrapper[5070]: I1213 03:43:46.267543 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="5583bfce-70ea-42f9-ac42-2c871e93fcdf" containerName="registry-server" Dec 13 03:43:46 crc kubenswrapper[5070]: I1213 03:43:46.268682 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="5583bfce-70ea-42f9-ac42-2c871e93fcdf" containerName="registry-server" Dec 13 03:43:46 crc kubenswrapper[5070]: I1213 03:43:46.271268 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-24z2t" Dec 13 03:43:46 crc kubenswrapper[5070]: I1213 03:43:46.280881 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-24z2t"] Dec 13 03:43:46 crc kubenswrapper[5070]: I1213 03:43:46.361643 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ab28dfeb-4eb6-4ec4-9605-ad59eee583bd-utilities\") pod \"certified-operators-24z2t\" (UID: \"ab28dfeb-4eb6-4ec4-9605-ad59eee583bd\") " pod="openshift-marketplace/certified-operators-24z2t" Dec 13 03:43:46 crc kubenswrapper[5070]: I1213 03:43:46.361711 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ab28dfeb-4eb6-4ec4-9605-ad59eee583bd-catalog-content\") pod \"certified-operators-24z2t\" (UID: \"ab28dfeb-4eb6-4ec4-9605-ad59eee583bd\") " pod="openshift-marketplace/certified-operators-24z2t" Dec 13 03:43:46 crc kubenswrapper[5070]: I1213 03:43:46.361730 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hsc8k\" (UniqueName: \"kubernetes.io/projected/ab28dfeb-4eb6-4ec4-9605-ad59eee583bd-kube-api-access-hsc8k\") pod \"certified-operators-24z2t\" (UID: \"ab28dfeb-4eb6-4ec4-9605-ad59eee583bd\") " pod="openshift-marketplace/certified-operators-24z2t" Dec 13 03:43:46 crc kubenswrapper[5070]: I1213 03:43:46.462940 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ab28dfeb-4eb6-4ec4-9605-ad59eee583bd-utilities\") pod \"certified-operators-24z2t\" (UID: \"ab28dfeb-4eb6-4ec4-9605-ad59eee583bd\") " pod="openshift-marketplace/certified-operators-24z2t" Dec 13 03:43:46 crc kubenswrapper[5070]: I1213 03:43:46.463007 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ab28dfeb-4eb6-4ec4-9605-ad59eee583bd-catalog-content\") pod \"certified-operators-24z2t\" (UID: \"ab28dfeb-4eb6-4ec4-9605-ad59eee583bd\") " pod="openshift-marketplace/certified-operators-24z2t" Dec 13 03:43:46 crc kubenswrapper[5070]: I1213 03:43:46.463024 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hsc8k\" (UniqueName: \"kubernetes.io/projected/ab28dfeb-4eb6-4ec4-9605-ad59eee583bd-kube-api-access-hsc8k\") pod \"certified-operators-24z2t\" (UID: \"ab28dfeb-4eb6-4ec4-9605-ad59eee583bd\") " pod="openshift-marketplace/certified-operators-24z2t" Dec 13 03:43:46 crc kubenswrapper[5070]: I1213 03:43:46.463579 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ab28dfeb-4eb6-4ec4-9605-ad59eee583bd-utilities\") pod \"certified-operators-24z2t\" (UID: \"ab28dfeb-4eb6-4ec4-9605-ad59eee583bd\") " pod="openshift-marketplace/certified-operators-24z2t" Dec 13 03:43:46 crc kubenswrapper[5070]: I1213 03:43:46.463925 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ab28dfeb-4eb6-4ec4-9605-ad59eee583bd-catalog-content\") pod \"certified-operators-24z2t\" (UID: \"ab28dfeb-4eb6-4ec4-9605-ad59eee583bd\") " pod="openshift-marketplace/certified-operators-24z2t" Dec 13 03:43:46 crc kubenswrapper[5070]: I1213 03:43:46.485419 5070 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-hsc8k\" (UniqueName: \"kubernetes.io/projected/ab28dfeb-4eb6-4ec4-9605-ad59eee583bd-kube-api-access-hsc8k\") pod \"certified-operators-24z2t\" (UID: \"ab28dfeb-4eb6-4ec4-9605-ad59eee583bd\") " pod="openshift-marketplace/certified-operators-24z2t" Dec 13 03:43:46 crc kubenswrapper[5070]: I1213 03:43:46.597232 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-24z2t" Dec 13 03:43:46 crc kubenswrapper[5070]: I1213 03:43:46.964652 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-24z2t"] Dec 13 03:43:47 crc kubenswrapper[5070]: I1213 03:43:47.880708 5070 generic.go:334] "Generic (PLEG): container finished" podID="ab28dfeb-4eb6-4ec4-9605-ad59eee583bd" containerID="bc9283bdac873b4fda66643148a8e66606e8e7541f9430df885e660c8715d4b3" exitCode=0 Dec 13 03:43:47 crc kubenswrapper[5070]: I1213 03:43:47.880811 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-24z2t" event={"ID":"ab28dfeb-4eb6-4ec4-9605-ad59eee583bd","Type":"ContainerDied","Data":"bc9283bdac873b4fda66643148a8e66606e8e7541f9430df885e660c8715d4b3"} Dec 13 03:43:47 crc kubenswrapper[5070]: I1213 03:43:47.881091 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-24z2t" event={"ID":"ab28dfeb-4eb6-4ec4-9605-ad59eee583bd","Type":"ContainerStarted","Data":"411897a82c78f055652b28fe43a2c89f45e5a3b2c5f3e42ca86a4744c5d02f17"} Dec 13 03:43:49 crc kubenswrapper[5070]: I1213 03:43:49.899892 5070 generic.go:334] "Generic (PLEG): container finished" podID="ab28dfeb-4eb6-4ec4-9605-ad59eee583bd" containerID="9299c8c91e19e5d1ef9664e361c57f87fe81428aed91e42b68e73ba4a3b0f984" exitCode=0 Dec 13 03:43:49 crc kubenswrapper[5070]: I1213 03:43:49.899940 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-24z2t" event={"ID":"ab28dfeb-4eb6-4ec4-9605-ad59eee583bd","Type":"ContainerDied","Data":"9299c8c91e19e5d1ef9664e361c57f87fe81428aed91e42b68e73ba4a3b0f984"} Dec 13 03:43:50 crc kubenswrapper[5070]: I1213 03:43:50.928801 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-24z2t" event={"ID":"ab28dfeb-4eb6-4ec4-9605-ad59eee583bd","Type":"ContainerStarted","Data":"05df90b3c1e33cdd2a2d425d77b345c3d7c586d0392dd1e9fd3e97c54486638e"} Dec 13 03:43:50 crc kubenswrapper[5070]: I1213 03:43:50.959478 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-24z2t" podStartSLOduration=2.485938849 podStartE2EDuration="4.9594362s" podCreationTimestamp="2025-12-13 03:43:46 +0000 UTC" firstStartedPulling="2025-12-13 03:43:47.882758977 +0000 UTC m=+1920.118602523" lastFinishedPulling="2025-12-13 03:43:50.356256328 +0000 UTC m=+1922.592099874" observedRunningTime="2025-12-13 03:43:50.952952384 +0000 UTC m=+1923.188795940" watchObservedRunningTime="2025-12-13 03:43:50.9594362 +0000 UTC m=+1923.195279746" Dec 13 03:43:56 crc kubenswrapper[5070]: I1213 03:43:56.597793 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-24z2t" Dec 13 03:43:56 crc kubenswrapper[5070]: I1213 03:43:56.598362 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-24z2t" Dec 13 03:43:56 crc kubenswrapper[5070]: I1213 03:43:56.650709 5070 
kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-24z2t" Dec 13 03:43:57 crc kubenswrapper[5070]: I1213 03:43:57.032727 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-24z2t" Dec 13 03:43:57 crc kubenswrapper[5070]: I1213 03:43:57.086342 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-24z2t"] Dec 13 03:43:59 crc kubenswrapper[5070]: I1213 03:43:58.999638 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-24z2t" podUID="ab28dfeb-4eb6-4ec4-9605-ad59eee583bd" containerName="registry-server" containerID="cri-o://05df90b3c1e33cdd2a2d425d77b345c3d7c586d0392dd1e9fd3e97c54486638e" gracePeriod=2 Dec 13 03:43:59 crc kubenswrapper[5070]: I1213 03:43:59.167214 5070 scope.go:117] "RemoveContainer" containerID="868a59edbd4e377cf977ca832f6b9371f5846d986a6bbcacfbabf30c647e78a8" Dec 13 03:43:59 crc kubenswrapper[5070]: E1213 03:43:59.167487 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 03:43:59 crc kubenswrapper[5070]: E1213 03:43:59.810149 5070 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podab28dfeb_4eb6_4ec4_9605_ad59eee583bd.slice/crio-conmon-05df90b3c1e33cdd2a2d425d77b345c3d7c586d0392dd1e9fd3e97c54486638e.scope\": RecentStats: unable to find data in memory cache]" Dec 13 03:44:00 crc kubenswrapper[5070]: I1213 03:44:00.013113 5070 generic.go:334] "Generic (PLEG): container finished" podID="ab28dfeb-4eb6-4ec4-9605-ad59eee583bd" containerID="05df90b3c1e33cdd2a2d425d77b345c3d7c586d0392dd1e9fd3e97c54486638e" exitCode=0 Dec 13 03:44:00 crc kubenswrapper[5070]: I1213 03:44:00.013177 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-24z2t" event={"ID":"ab28dfeb-4eb6-4ec4-9605-ad59eee583bd","Type":"ContainerDied","Data":"05df90b3c1e33cdd2a2d425d77b345c3d7c586d0392dd1e9fd3e97c54486638e"} Dec 13 03:44:01 crc kubenswrapper[5070]: I1213 03:44:00.302617 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-mqq8x"] Dec 13 03:44:01 crc kubenswrapper[5070]: I1213 03:44:00.304784 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-mqq8x" Dec 13 03:44:01 crc kubenswrapper[5070]: I1213 03:44:00.417833 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tgzcw\" (UniqueName: \"kubernetes.io/projected/866b6c6a-2bca-40e3-a7fc-415ce419d9ce-kube-api-access-tgzcw\") pod \"community-operators-mqq8x\" (UID: \"866b6c6a-2bca-40e3-a7fc-415ce419d9ce\") " pod="openshift-marketplace/community-operators-mqq8x" Dec 13 03:44:01 crc kubenswrapper[5070]: I1213 03:44:00.417928 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/866b6c6a-2bca-40e3-a7fc-415ce419d9ce-utilities\") pod \"community-operators-mqq8x\" (UID: \"866b6c6a-2bca-40e3-a7fc-415ce419d9ce\") " pod="openshift-marketplace/community-operators-mqq8x" Dec 13 03:44:01 crc kubenswrapper[5070]: I1213 03:44:00.418049 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/866b6c6a-2bca-40e3-a7fc-415ce419d9ce-catalog-content\") pod \"community-operators-mqq8x\" (UID: \"866b6c6a-2bca-40e3-a7fc-415ce419d9ce\") " pod="openshift-marketplace/community-operators-mqq8x" Dec 13 03:44:01 crc kubenswrapper[5070]: I1213 03:44:00.521150 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/866b6c6a-2bca-40e3-a7fc-415ce419d9ce-catalog-content\") pod \"community-operators-mqq8x\" (UID: \"866b6c6a-2bca-40e3-a7fc-415ce419d9ce\") " pod="openshift-marketplace/community-operators-mqq8x" Dec 13 03:44:01 crc kubenswrapper[5070]: I1213 03:44:00.521351 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tgzcw\" (UniqueName: \"kubernetes.io/projected/866b6c6a-2bca-40e3-a7fc-415ce419d9ce-kube-api-access-tgzcw\") pod \"community-operators-mqq8x\" (UID: \"866b6c6a-2bca-40e3-a7fc-415ce419d9ce\") " pod="openshift-marketplace/community-operators-mqq8x" Dec 13 03:44:01 crc kubenswrapper[5070]: I1213 03:44:00.521436 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/866b6c6a-2bca-40e3-a7fc-415ce419d9ce-utilities\") pod \"community-operators-mqq8x\" (UID: \"866b6c6a-2bca-40e3-a7fc-415ce419d9ce\") " pod="openshift-marketplace/community-operators-mqq8x" Dec 13 03:44:01 crc kubenswrapper[5070]: I1213 03:44:00.521715 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/866b6c6a-2bca-40e3-a7fc-415ce419d9ce-catalog-content\") pod \"community-operators-mqq8x\" (UID: \"866b6c6a-2bca-40e3-a7fc-415ce419d9ce\") " pod="openshift-marketplace/community-operators-mqq8x" Dec 13 03:44:01 crc kubenswrapper[5070]: I1213 03:44:00.521910 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/866b6c6a-2bca-40e3-a7fc-415ce419d9ce-utilities\") pod \"community-operators-mqq8x\" (UID: \"866b6c6a-2bca-40e3-a7fc-415ce419d9ce\") " pod="openshift-marketplace/community-operators-mqq8x" Dec 13 03:44:01 crc kubenswrapper[5070]: I1213 03:44:00.551612 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tgzcw\" (UniqueName: \"kubernetes.io/projected/866b6c6a-2bca-40e3-a7fc-415ce419d9ce-kube-api-access-tgzcw\") pod 
\"community-operators-mqq8x\" (UID: \"866b6c6a-2bca-40e3-a7fc-415ce419d9ce\") " pod="openshift-marketplace/community-operators-mqq8x" Dec 13 03:44:01 crc kubenswrapper[5070]: I1213 03:44:00.633908 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-mqq8x" Dec 13 03:44:01 crc kubenswrapper[5070]: I1213 03:44:01.080919 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-mqq8x"] Dec 13 03:44:01 crc kubenswrapper[5070]: I1213 03:44:01.521489 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-24z2t" Dec 13 03:44:01 crc kubenswrapper[5070]: I1213 03:44:01.654982 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hsc8k\" (UniqueName: \"kubernetes.io/projected/ab28dfeb-4eb6-4ec4-9605-ad59eee583bd-kube-api-access-hsc8k\") pod \"ab28dfeb-4eb6-4ec4-9605-ad59eee583bd\" (UID: \"ab28dfeb-4eb6-4ec4-9605-ad59eee583bd\") " Dec 13 03:44:01 crc kubenswrapper[5070]: I1213 03:44:01.655468 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ab28dfeb-4eb6-4ec4-9605-ad59eee583bd-catalog-content\") pod \"ab28dfeb-4eb6-4ec4-9605-ad59eee583bd\" (UID: \"ab28dfeb-4eb6-4ec4-9605-ad59eee583bd\") " Dec 13 03:44:01 crc kubenswrapper[5070]: I1213 03:44:01.655519 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ab28dfeb-4eb6-4ec4-9605-ad59eee583bd-utilities\") pod \"ab28dfeb-4eb6-4ec4-9605-ad59eee583bd\" (UID: \"ab28dfeb-4eb6-4ec4-9605-ad59eee583bd\") " Dec 13 03:44:01 crc kubenswrapper[5070]: I1213 03:44:01.657431 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ab28dfeb-4eb6-4ec4-9605-ad59eee583bd-utilities" (OuterVolumeSpecName: "utilities") pod "ab28dfeb-4eb6-4ec4-9605-ad59eee583bd" (UID: "ab28dfeb-4eb6-4ec4-9605-ad59eee583bd"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 03:44:01 crc kubenswrapper[5070]: I1213 03:44:01.664037 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ab28dfeb-4eb6-4ec4-9605-ad59eee583bd-kube-api-access-hsc8k" (OuterVolumeSpecName: "kube-api-access-hsc8k") pod "ab28dfeb-4eb6-4ec4-9605-ad59eee583bd" (UID: "ab28dfeb-4eb6-4ec4-9605-ad59eee583bd"). InnerVolumeSpecName "kube-api-access-hsc8k". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:44:01 crc kubenswrapper[5070]: I1213 03:44:01.719361 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-mqq8x"] Dec 13 03:44:01 crc kubenswrapper[5070]: W1213 03:44:01.727637 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod866b6c6a_2bca_40e3_a7fc_415ce419d9ce.slice/crio-d986571bf37f9c429becc38e332b51443ef7e3b4b6e455a8c1eaba195675cc41 WatchSource:0}: Error finding container d986571bf37f9c429becc38e332b51443ef7e3b4b6e455a8c1eaba195675cc41: Status 404 returned error can't find the container with id d986571bf37f9c429becc38e332b51443ef7e3b4b6e455a8c1eaba195675cc41 Dec 13 03:44:01 crc kubenswrapper[5070]: I1213 03:44:01.762813 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hsc8k\" (UniqueName: \"kubernetes.io/projected/ab28dfeb-4eb6-4ec4-9605-ad59eee583bd-kube-api-access-hsc8k\") on node \"crc\" DevicePath \"\"" Dec 13 03:44:01 crc kubenswrapper[5070]: I1213 03:44:01.762860 5070 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ab28dfeb-4eb6-4ec4-9605-ad59eee583bd-utilities\") on node \"crc\" DevicePath \"\"" Dec 13 03:44:01 crc kubenswrapper[5070]: I1213 03:44:01.789909 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ab28dfeb-4eb6-4ec4-9605-ad59eee583bd-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ab28dfeb-4eb6-4ec4-9605-ad59eee583bd" (UID: "ab28dfeb-4eb6-4ec4-9605-ad59eee583bd"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 03:44:01 crc kubenswrapper[5070]: I1213 03:44:01.864825 5070 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ab28dfeb-4eb6-4ec4-9605-ad59eee583bd-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 13 03:44:02 crc kubenswrapper[5070]: I1213 03:44:02.087933 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-24z2t" event={"ID":"ab28dfeb-4eb6-4ec4-9605-ad59eee583bd","Type":"ContainerDied","Data":"411897a82c78f055652b28fe43a2c89f45e5a3b2c5f3e42ca86a4744c5d02f17"} Dec 13 03:44:02 crc kubenswrapper[5070]: I1213 03:44:02.087956 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-24z2t" Dec 13 03:44:02 crc kubenswrapper[5070]: I1213 03:44:02.087993 5070 scope.go:117] "RemoveContainer" containerID="05df90b3c1e33cdd2a2d425d77b345c3d7c586d0392dd1e9fd3e97c54486638e" Dec 13 03:44:02 crc kubenswrapper[5070]: I1213 03:44:02.090353 5070 generic.go:334] "Generic (PLEG): container finished" podID="866b6c6a-2bca-40e3-a7fc-415ce419d9ce" containerID="7b48e28ff716e77b35a05fae7aa10aa39991547e6064cbbc17cde54fc1b49590" exitCode=0 Dec 13 03:44:02 crc kubenswrapper[5070]: I1213 03:44:02.090549 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mqq8x" event={"ID":"866b6c6a-2bca-40e3-a7fc-415ce419d9ce","Type":"ContainerDied","Data":"7b48e28ff716e77b35a05fae7aa10aa39991547e6064cbbc17cde54fc1b49590"} Dec 13 03:44:02 crc kubenswrapper[5070]: I1213 03:44:02.090728 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mqq8x" event={"ID":"866b6c6a-2bca-40e3-a7fc-415ce419d9ce","Type":"ContainerStarted","Data":"d986571bf37f9c429becc38e332b51443ef7e3b4b6e455a8c1eaba195675cc41"} Dec 13 03:44:02 crc kubenswrapper[5070]: I1213 03:44:02.119424 5070 scope.go:117] "RemoveContainer" containerID="9299c8c91e19e5d1ef9664e361c57f87fe81428aed91e42b68e73ba4a3b0f984" Dec 13 03:44:02 crc kubenswrapper[5070]: I1213 03:44:02.182654 5070 scope.go:117] "RemoveContainer" containerID="bc9283bdac873b4fda66643148a8e66606e8e7541f9430df885e660c8715d4b3" Dec 13 03:44:02 crc kubenswrapper[5070]: I1213 03:44:02.200244 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-24z2t"] Dec 13 03:44:02 crc kubenswrapper[5070]: I1213 03:44:02.207401 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-24z2t"] Dec 13 03:44:02 crc kubenswrapper[5070]: I1213 03:44:02.896746 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-pg9xq"] Dec 13 03:44:02 crc kubenswrapper[5070]: E1213 03:44:02.897224 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab28dfeb-4eb6-4ec4-9605-ad59eee583bd" containerName="registry-server" Dec 13 03:44:02 crc kubenswrapper[5070]: I1213 03:44:02.897264 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab28dfeb-4eb6-4ec4-9605-ad59eee583bd" containerName="registry-server" Dec 13 03:44:02 crc kubenswrapper[5070]: E1213 03:44:02.897285 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab28dfeb-4eb6-4ec4-9605-ad59eee583bd" containerName="extract-utilities" Dec 13 03:44:02 crc kubenswrapper[5070]: I1213 03:44:02.897294 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab28dfeb-4eb6-4ec4-9605-ad59eee583bd" containerName="extract-utilities" Dec 13 03:44:02 crc kubenswrapper[5070]: E1213 03:44:02.897310 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab28dfeb-4eb6-4ec4-9605-ad59eee583bd" containerName="extract-content" Dec 13 03:44:02 crc kubenswrapper[5070]: I1213 03:44:02.897319 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab28dfeb-4eb6-4ec4-9605-ad59eee583bd" containerName="extract-content" Dec 13 03:44:02 crc kubenswrapper[5070]: I1213 03:44:02.897665 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="ab28dfeb-4eb6-4ec4-9605-ad59eee583bd" containerName="registry-server" Dec 13 03:44:02 crc kubenswrapper[5070]: I1213 03:44:02.899260 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pg9xq" Dec 13 03:44:02 crc kubenswrapper[5070]: I1213 03:44:02.916923 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-pg9xq"] Dec 13 03:44:02 crc kubenswrapper[5070]: I1213 03:44:02.994372 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/55c77756-8958-4a70-9d22-964a13bdd82c-catalog-content\") pod \"redhat-marketplace-pg9xq\" (UID: \"55c77756-8958-4a70-9d22-964a13bdd82c\") " pod="openshift-marketplace/redhat-marketplace-pg9xq" Dec 13 03:44:02 crc kubenswrapper[5070]: I1213 03:44:02.994554 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/55c77756-8958-4a70-9d22-964a13bdd82c-utilities\") pod \"redhat-marketplace-pg9xq\" (UID: \"55c77756-8958-4a70-9d22-964a13bdd82c\") " pod="openshift-marketplace/redhat-marketplace-pg9xq" Dec 13 03:44:02 crc kubenswrapper[5070]: I1213 03:44:02.994705 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xgdwz\" (UniqueName: \"kubernetes.io/projected/55c77756-8958-4a70-9d22-964a13bdd82c-kube-api-access-xgdwz\") pod \"redhat-marketplace-pg9xq\" (UID: \"55c77756-8958-4a70-9d22-964a13bdd82c\") " pod="openshift-marketplace/redhat-marketplace-pg9xq" Dec 13 03:44:03 crc kubenswrapper[5070]: I1213 03:44:03.095942 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/55c77756-8958-4a70-9d22-964a13bdd82c-utilities\") pod \"redhat-marketplace-pg9xq\" (UID: \"55c77756-8958-4a70-9d22-964a13bdd82c\") " pod="openshift-marketplace/redhat-marketplace-pg9xq" Dec 13 03:44:03 crc kubenswrapper[5070]: I1213 03:44:03.096062 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xgdwz\" (UniqueName: \"kubernetes.io/projected/55c77756-8958-4a70-9d22-964a13bdd82c-kube-api-access-xgdwz\") pod \"redhat-marketplace-pg9xq\" (UID: \"55c77756-8958-4a70-9d22-964a13bdd82c\") " pod="openshift-marketplace/redhat-marketplace-pg9xq" Dec 13 03:44:03 crc kubenswrapper[5070]: I1213 03:44:03.096100 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/55c77756-8958-4a70-9d22-964a13bdd82c-catalog-content\") pod \"redhat-marketplace-pg9xq\" (UID: \"55c77756-8958-4a70-9d22-964a13bdd82c\") " pod="openshift-marketplace/redhat-marketplace-pg9xq" Dec 13 03:44:03 crc kubenswrapper[5070]: I1213 03:44:03.096755 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/55c77756-8958-4a70-9d22-964a13bdd82c-utilities\") pod \"redhat-marketplace-pg9xq\" (UID: \"55c77756-8958-4a70-9d22-964a13bdd82c\") " pod="openshift-marketplace/redhat-marketplace-pg9xq" Dec 13 03:44:03 crc kubenswrapper[5070]: I1213 03:44:03.096998 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/55c77756-8958-4a70-9d22-964a13bdd82c-catalog-content\") pod \"redhat-marketplace-pg9xq\" (UID: \"55c77756-8958-4a70-9d22-964a13bdd82c\") " pod="openshift-marketplace/redhat-marketplace-pg9xq" Dec 13 03:44:03 crc kubenswrapper[5070]: I1213 03:44:03.120720 5070 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-xgdwz\" (UniqueName: \"kubernetes.io/projected/55c77756-8958-4a70-9d22-964a13bdd82c-kube-api-access-xgdwz\") pod \"redhat-marketplace-pg9xq\" (UID: \"55c77756-8958-4a70-9d22-964a13bdd82c\") " pod="openshift-marketplace/redhat-marketplace-pg9xq" Dec 13 03:44:03 crc kubenswrapper[5070]: I1213 03:44:03.240902 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pg9xq" Dec 13 03:44:03 crc kubenswrapper[5070]: I1213 03:44:03.767262 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-pg9xq"] Dec 13 03:44:03 crc kubenswrapper[5070]: W1213 03:44:03.775525 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod55c77756_8958_4a70_9d22_964a13bdd82c.slice/crio-444227ce48a1cf56939380685165629336493a1641d469bab31100d1bb8a42af WatchSource:0}: Error finding container 444227ce48a1cf56939380685165629336493a1641d469bab31100d1bb8a42af: Status 404 returned error can't find the container with id 444227ce48a1cf56939380685165629336493a1641d469bab31100d1bb8a42af Dec 13 03:44:04 crc kubenswrapper[5070]: I1213 03:44:04.110562 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pg9xq" event={"ID":"55c77756-8958-4a70-9d22-964a13bdd82c","Type":"ContainerStarted","Data":"444227ce48a1cf56939380685165629336493a1641d469bab31100d1bb8a42af"} Dec 13 03:44:04 crc kubenswrapper[5070]: I1213 03:44:04.181494 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ab28dfeb-4eb6-4ec4-9605-ad59eee583bd" path="/var/lib/kubelet/pods/ab28dfeb-4eb6-4ec4-9605-ad59eee583bd/volumes" Dec 13 03:44:06 crc kubenswrapper[5070]: I1213 03:44:06.137675 5070 generic.go:334] "Generic (PLEG): container finished" podID="55c77756-8958-4a70-9d22-964a13bdd82c" containerID="5a02dd0f0de021c90f2e4bf06b0d91ed49b49d5845dfce41803bf8522516089a" exitCode=0 Dec 13 03:44:06 crc kubenswrapper[5070]: I1213 03:44:06.137820 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pg9xq" event={"ID":"55c77756-8958-4a70-9d22-964a13bdd82c","Type":"ContainerDied","Data":"5a02dd0f0de021c90f2e4bf06b0d91ed49b49d5845dfce41803bf8522516089a"} Dec 13 03:44:08 crc kubenswrapper[5070]: I1213 03:44:08.157237 5070 generic.go:334] "Generic (PLEG): container finished" podID="866b6c6a-2bca-40e3-a7fc-415ce419d9ce" containerID="3481d1a0e19811c79262a9b59dfc1bba1b003ce5650f9de32c6b6f85b5863ceb" exitCode=0 Dec 13 03:44:08 crc kubenswrapper[5070]: I1213 03:44:08.157666 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mqq8x" event={"ID":"866b6c6a-2bca-40e3-a7fc-415ce419d9ce","Type":"ContainerDied","Data":"3481d1a0e19811c79262a9b59dfc1bba1b003ce5650f9de32c6b6f85b5863ceb"} Dec 13 03:44:10 crc kubenswrapper[5070]: I1213 03:44:10.171553 5070 scope.go:117] "RemoveContainer" containerID="868a59edbd4e377cf977ca832f6b9371f5846d986a6bbcacfbabf30c647e78a8" Dec 13 03:44:10 crc kubenswrapper[5070]: E1213 03:44:10.172410 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" 
podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 03:44:10 crc kubenswrapper[5070]: I1213 03:44:10.192235 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mqq8x" event={"ID":"866b6c6a-2bca-40e3-a7fc-415ce419d9ce","Type":"ContainerStarted","Data":"60ceecb5c2ae07b1e72ba34120abe7f99fec6e5899078673b684b41aae75ae47"} Dec 13 03:44:10 crc kubenswrapper[5070]: I1213 03:44:10.194956 5070 generic.go:334] "Generic (PLEG): container finished" podID="55c77756-8958-4a70-9d22-964a13bdd82c" containerID="5b3fac62942df026e5e33ce1459671ba4b3175b20951cb67da7a6b27b736360f" exitCode=0 Dec 13 03:44:10 crc kubenswrapper[5070]: I1213 03:44:10.194998 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pg9xq" event={"ID":"55c77756-8958-4a70-9d22-964a13bdd82c","Type":"ContainerDied","Data":"5b3fac62942df026e5e33ce1459671ba4b3175b20951cb67da7a6b27b736360f"} Dec 13 03:44:10 crc kubenswrapper[5070]: I1213 03:44:10.213262 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-mqq8x" podStartSLOduration=2.925269352 podStartE2EDuration="10.213242747s" podCreationTimestamp="2025-12-13 03:44:00 +0000 UTC" firstStartedPulling="2025-12-13 03:44:02.096599365 +0000 UTC m=+1934.332442911" lastFinishedPulling="2025-12-13 03:44:09.38457277 +0000 UTC m=+1941.620416306" observedRunningTime="2025-12-13 03:44:10.20821805 +0000 UTC m=+1942.444061606" watchObservedRunningTime="2025-12-13 03:44:10.213242747 +0000 UTC m=+1942.449086293" Dec 13 03:44:10 crc kubenswrapper[5070]: I1213 03:44:10.634078 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-mqq8x" Dec 13 03:44:10 crc kubenswrapper[5070]: I1213 03:44:10.634425 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-mqq8x" Dec 13 03:44:11 crc kubenswrapper[5070]: I1213 03:44:11.204774 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pg9xq" event={"ID":"55c77756-8958-4a70-9d22-964a13bdd82c","Type":"ContainerStarted","Data":"3a05f05cb78926cbf37b5f4af02f03d402771d5f28282998d4a3fa3490fe0d5e"} Dec 13 03:44:11 crc kubenswrapper[5070]: I1213 03:44:11.222020 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-pg9xq" podStartSLOduration=5.659391584 podStartE2EDuration="9.22200431s" podCreationTimestamp="2025-12-13 03:44:02 +0000 UTC" firstStartedPulling="2025-12-13 03:44:07.256057276 +0000 UTC m=+1939.491900822" lastFinishedPulling="2025-12-13 03:44:10.818670002 +0000 UTC m=+1943.054513548" observedRunningTime="2025-12-13 03:44:11.219616956 +0000 UTC m=+1943.455460522" watchObservedRunningTime="2025-12-13 03:44:11.22200431 +0000 UTC m=+1943.457847856" Dec 13 03:44:11 crc kubenswrapper[5070]: I1213 03:44:11.691142 5070 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-mqq8x" podUID="866b6c6a-2bca-40e3-a7fc-415ce419d9ce" containerName="registry-server" probeResult="failure" output=< Dec 13 03:44:11 crc kubenswrapper[5070]: timeout: failed to connect service ":50051" within 1s Dec 13 03:44:11 crc kubenswrapper[5070]: > Dec 13 03:44:13 crc kubenswrapper[5070]: I1213 03:44:13.241590 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-pg9xq" Dec 13 03:44:13 crc 
kubenswrapper[5070]: I1213 03:44:13.243295 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-pg9xq" Dec 13 03:44:13 crc kubenswrapper[5070]: I1213 03:44:13.290975 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-pg9xq" Dec 13 03:44:15 crc kubenswrapper[5070]: I1213 03:44:15.288022 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-pg9xq" Dec 13 03:44:15 crc kubenswrapper[5070]: I1213 03:44:15.344025 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-pg9xq"] Dec 13 03:44:17 crc kubenswrapper[5070]: I1213 03:44:17.250915 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-pg9xq" podUID="55c77756-8958-4a70-9d22-964a13bdd82c" containerName="registry-server" containerID="cri-o://3a05f05cb78926cbf37b5f4af02f03d402771d5f28282998d4a3fa3490fe0d5e" gracePeriod=2 Dec 13 03:44:17 crc kubenswrapper[5070]: I1213 03:44:17.634235 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pg9xq" Dec 13 03:44:17 crc kubenswrapper[5070]: I1213 03:44:17.689777 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/55c77756-8958-4a70-9d22-964a13bdd82c-catalog-content\") pod \"55c77756-8958-4a70-9d22-964a13bdd82c\" (UID: \"55c77756-8958-4a70-9d22-964a13bdd82c\") " Dec 13 03:44:17 crc kubenswrapper[5070]: I1213 03:44:17.689985 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xgdwz\" (UniqueName: \"kubernetes.io/projected/55c77756-8958-4a70-9d22-964a13bdd82c-kube-api-access-xgdwz\") pod \"55c77756-8958-4a70-9d22-964a13bdd82c\" (UID: \"55c77756-8958-4a70-9d22-964a13bdd82c\") " Dec 13 03:44:17 crc kubenswrapper[5070]: I1213 03:44:17.696010 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/55c77756-8958-4a70-9d22-964a13bdd82c-kube-api-access-xgdwz" (OuterVolumeSpecName: "kube-api-access-xgdwz") pod "55c77756-8958-4a70-9d22-964a13bdd82c" (UID: "55c77756-8958-4a70-9d22-964a13bdd82c"). InnerVolumeSpecName "kube-api-access-xgdwz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:44:17 crc kubenswrapper[5070]: I1213 03:44:17.712463 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/55c77756-8958-4a70-9d22-964a13bdd82c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "55c77756-8958-4a70-9d22-964a13bdd82c" (UID: "55c77756-8958-4a70-9d22-964a13bdd82c"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 03:44:17 crc kubenswrapper[5070]: I1213 03:44:17.792107 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/55c77756-8958-4a70-9d22-964a13bdd82c-utilities\") pod \"55c77756-8958-4a70-9d22-964a13bdd82c\" (UID: \"55c77756-8958-4a70-9d22-964a13bdd82c\") " Dec 13 03:44:17 crc kubenswrapper[5070]: I1213 03:44:17.792644 5070 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/55c77756-8958-4a70-9d22-964a13bdd82c-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 13 03:44:17 crc kubenswrapper[5070]: I1213 03:44:17.792663 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xgdwz\" (UniqueName: \"kubernetes.io/projected/55c77756-8958-4a70-9d22-964a13bdd82c-kube-api-access-xgdwz\") on node \"crc\" DevicePath \"\"" Dec 13 03:44:17 crc kubenswrapper[5070]: I1213 03:44:17.792976 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/55c77756-8958-4a70-9d22-964a13bdd82c-utilities" (OuterVolumeSpecName: "utilities") pod "55c77756-8958-4a70-9d22-964a13bdd82c" (UID: "55c77756-8958-4a70-9d22-964a13bdd82c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 03:44:17 crc kubenswrapper[5070]: I1213 03:44:17.894817 5070 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/55c77756-8958-4a70-9d22-964a13bdd82c-utilities\") on node \"crc\" DevicePath \"\"" Dec 13 03:44:18 crc kubenswrapper[5070]: I1213 03:44:18.259946 5070 generic.go:334] "Generic (PLEG): container finished" podID="55c77756-8958-4a70-9d22-964a13bdd82c" containerID="3a05f05cb78926cbf37b5f4af02f03d402771d5f28282998d4a3fa3490fe0d5e" exitCode=0 Dec 13 03:44:18 crc kubenswrapper[5070]: I1213 03:44:18.260021 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pg9xq" Dec 13 03:44:18 crc kubenswrapper[5070]: I1213 03:44:18.260043 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pg9xq" event={"ID":"55c77756-8958-4a70-9d22-964a13bdd82c","Type":"ContainerDied","Data":"3a05f05cb78926cbf37b5f4af02f03d402771d5f28282998d4a3fa3490fe0d5e"} Dec 13 03:44:18 crc kubenswrapper[5070]: I1213 03:44:18.260416 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pg9xq" event={"ID":"55c77756-8958-4a70-9d22-964a13bdd82c","Type":"ContainerDied","Data":"444227ce48a1cf56939380685165629336493a1641d469bab31100d1bb8a42af"} Dec 13 03:44:18 crc kubenswrapper[5070]: I1213 03:44:18.260438 5070 scope.go:117] "RemoveContainer" containerID="3a05f05cb78926cbf37b5f4af02f03d402771d5f28282998d4a3fa3490fe0d5e" Dec 13 03:44:18 crc kubenswrapper[5070]: I1213 03:44:18.287936 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-pg9xq"] Dec 13 03:44:18 crc kubenswrapper[5070]: I1213 03:44:18.290949 5070 scope.go:117] "RemoveContainer" containerID="5b3fac62942df026e5e33ce1459671ba4b3175b20951cb67da7a6b27b736360f" Dec 13 03:44:18 crc kubenswrapper[5070]: I1213 03:44:18.296791 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-pg9xq"] Dec 13 03:44:18 crc kubenswrapper[5070]: I1213 03:44:18.322627 5070 scope.go:117] "RemoveContainer" containerID="5a02dd0f0de021c90f2e4bf06b0d91ed49b49d5845dfce41803bf8522516089a" Dec 13 03:44:18 crc kubenswrapper[5070]: I1213 03:44:18.352162 5070 scope.go:117] "RemoveContainer" containerID="3a05f05cb78926cbf37b5f4af02f03d402771d5f28282998d4a3fa3490fe0d5e" Dec 13 03:44:18 crc kubenswrapper[5070]: E1213 03:44:18.353106 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3a05f05cb78926cbf37b5f4af02f03d402771d5f28282998d4a3fa3490fe0d5e\": container with ID starting with 3a05f05cb78926cbf37b5f4af02f03d402771d5f28282998d4a3fa3490fe0d5e not found: ID does not exist" containerID="3a05f05cb78926cbf37b5f4af02f03d402771d5f28282998d4a3fa3490fe0d5e" Dec 13 03:44:18 crc kubenswrapper[5070]: I1213 03:44:18.353153 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3a05f05cb78926cbf37b5f4af02f03d402771d5f28282998d4a3fa3490fe0d5e"} err="failed to get container status \"3a05f05cb78926cbf37b5f4af02f03d402771d5f28282998d4a3fa3490fe0d5e\": rpc error: code = NotFound desc = could not find container \"3a05f05cb78926cbf37b5f4af02f03d402771d5f28282998d4a3fa3490fe0d5e\": container with ID starting with 3a05f05cb78926cbf37b5f4af02f03d402771d5f28282998d4a3fa3490fe0d5e not found: ID does not exist" Dec 13 03:44:18 crc kubenswrapper[5070]: I1213 03:44:18.353179 5070 scope.go:117] "RemoveContainer" containerID="5b3fac62942df026e5e33ce1459671ba4b3175b20951cb67da7a6b27b736360f" Dec 13 03:44:18 crc kubenswrapper[5070]: E1213 03:44:18.353577 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5b3fac62942df026e5e33ce1459671ba4b3175b20951cb67da7a6b27b736360f\": container with ID starting with 5b3fac62942df026e5e33ce1459671ba4b3175b20951cb67da7a6b27b736360f not found: ID does not exist" containerID="5b3fac62942df026e5e33ce1459671ba4b3175b20951cb67da7a6b27b736360f" Dec 13 03:44:18 crc kubenswrapper[5070]: I1213 03:44:18.353622 5070 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5b3fac62942df026e5e33ce1459671ba4b3175b20951cb67da7a6b27b736360f"} err="failed to get container status \"5b3fac62942df026e5e33ce1459671ba4b3175b20951cb67da7a6b27b736360f\": rpc error: code = NotFound desc = could not find container \"5b3fac62942df026e5e33ce1459671ba4b3175b20951cb67da7a6b27b736360f\": container with ID starting with 5b3fac62942df026e5e33ce1459671ba4b3175b20951cb67da7a6b27b736360f not found: ID does not exist" Dec 13 03:44:18 crc kubenswrapper[5070]: I1213 03:44:18.353676 5070 scope.go:117] "RemoveContainer" containerID="5a02dd0f0de021c90f2e4bf06b0d91ed49b49d5845dfce41803bf8522516089a" Dec 13 03:44:18 crc kubenswrapper[5070]: E1213 03:44:18.354121 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5a02dd0f0de021c90f2e4bf06b0d91ed49b49d5845dfce41803bf8522516089a\": container with ID starting with 5a02dd0f0de021c90f2e4bf06b0d91ed49b49d5845dfce41803bf8522516089a not found: ID does not exist" containerID="5a02dd0f0de021c90f2e4bf06b0d91ed49b49d5845dfce41803bf8522516089a" Dec 13 03:44:18 crc kubenswrapper[5070]: I1213 03:44:18.354146 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5a02dd0f0de021c90f2e4bf06b0d91ed49b49d5845dfce41803bf8522516089a"} err="failed to get container status \"5a02dd0f0de021c90f2e4bf06b0d91ed49b49d5845dfce41803bf8522516089a\": rpc error: code = NotFound desc = could not find container \"5a02dd0f0de021c90f2e4bf06b0d91ed49b49d5845dfce41803bf8522516089a\": container with ID starting with 5a02dd0f0de021c90f2e4bf06b0d91ed49b49d5845dfce41803bf8522516089a not found: ID does not exist" Dec 13 03:44:20 crc kubenswrapper[5070]: I1213 03:44:20.180838 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="55c77756-8958-4a70-9d22-964a13bdd82c" path="/var/lib/kubelet/pods/55c77756-8958-4a70-9d22-964a13bdd82c/volumes" Dec 13 03:44:20 crc kubenswrapper[5070]: I1213 03:44:20.684655 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-mqq8x" Dec 13 03:44:20 crc kubenswrapper[5070]: I1213 03:44:20.732092 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-mqq8x" Dec 13 03:44:21 crc kubenswrapper[5070]: I1213 03:44:21.927046 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-mqq8x"] Dec 13 03:44:22 crc kubenswrapper[5070]: I1213 03:44:22.303936 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-mqq8x" podUID="866b6c6a-2bca-40e3-a7fc-415ce419d9ce" containerName="registry-server" containerID="cri-o://60ceecb5c2ae07b1e72ba34120abe7f99fec6e5899078673b684b41aae75ae47" gracePeriod=2 Dec 13 03:44:23 crc kubenswrapper[5070]: I1213 03:44:23.168036 5070 scope.go:117] "RemoveContainer" containerID="868a59edbd4e377cf977ca832f6b9371f5846d986a6bbcacfbabf30c647e78a8" Dec 13 03:44:23 crc kubenswrapper[5070]: E1213 03:44:23.168423 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 03:44:23 crc kubenswrapper[5070]: I1213 03:44:23.295990 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-mqq8x" Dec 13 03:44:23 crc kubenswrapper[5070]: I1213 03:44:23.314646 5070 generic.go:334] "Generic (PLEG): container finished" podID="866b6c6a-2bca-40e3-a7fc-415ce419d9ce" containerID="60ceecb5c2ae07b1e72ba34120abe7f99fec6e5899078673b684b41aae75ae47" exitCode=0 Dec 13 03:44:23 crc kubenswrapper[5070]: I1213 03:44:23.314692 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mqq8x" event={"ID":"866b6c6a-2bca-40e3-a7fc-415ce419d9ce","Type":"ContainerDied","Data":"60ceecb5c2ae07b1e72ba34120abe7f99fec6e5899078673b684b41aae75ae47"} Dec 13 03:44:23 crc kubenswrapper[5070]: I1213 03:44:23.314726 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mqq8x" event={"ID":"866b6c6a-2bca-40e3-a7fc-415ce419d9ce","Type":"ContainerDied","Data":"d986571bf37f9c429becc38e332b51443ef7e3b4b6e455a8c1eaba195675cc41"} Dec 13 03:44:23 crc kubenswrapper[5070]: I1213 03:44:23.314746 5070 scope.go:117] "RemoveContainer" containerID="60ceecb5c2ae07b1e72ba34120abe7f99fec6e5899078673b684b41aae75ae47" Dec 13 03:44:23 crc kubenswrapper[5070]: I1213 03:44:23.314902 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-mqq8x" Dec 13 03:44:23 crc kubenswrapper[5070]: I1213 03:44:23.376616 5070 scope.go:117] "RemoveContainer" containerID="3481d1a0e19811c79262a9b59dfc1bba1b003ce5650f9de32c6b6f85b5863ceb" Dec 13 03:44:23 crc kubenswrapper[5070]: I1213 03:44:23.396037 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/866b6c6a-2bca-40e3-a7fc-415ce419d9ce-utilities\") pod \"866b6c6a-2bca-40e3-a7fc-415ce419d9ce\" (UID: \"866b6c6a-2bca-40e3-a7fc-415ce419d9ce\") " Dec 13 03:44:23 crc kubenswrapper[5070]: I1213 03:44:23.396174 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tgzcw\" (UniqueName: \"kubernetes.io/projected/866b6c6a-2bca-40e3-a7fc-415ce419d9ce-kube-api-access-tgzcw\") pod \"866b6c6a-2bca-40e3-a7fc-415ce419d9ce\" (UID: \"866b6c6a-2bca-40e3-a7fc-415ce419d9ce\") " Dec 13 03:44:23 crc kubenswrapper[5070]: I1213 03:44:23.396236 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/866b6c6a-2bca-40e3-a7fc-415ce419d9ce-catalog-content\") pod \"866b6c6a-2bca-40e3-a7fc-415ce419d9ce\" (UID: \"866b6c6a-2bca-40e3-a7fc-415ce419d9ce\") " Dec 13 03:44:23 crc kubenswrapper[5070]: I1213 03:44:23.397141 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/866b6c6a-2bca-40e3-a7fc-415ce419d9ce-utilities" (OuterVolumeSpecName: "utilities") pod "866b6c6a-2bca-40e3-a7fc-415ce419d9ce" (UID: "866b6c6a-2bca-40e3-a7fc-415ce419d9ce"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 03:44:23 crc kubenswrapper[5070]: I1213 03:44:23.403493 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/866b6c6a-2bca-40e3-a7fc-415ce419d9ce-kube-api-access-tgzcw" (OuterVolumeSpecName: "kube-api-access-tgzcw") pod "866b6c6a-2bca-40e3-a7fc-415ce419d9ce" (UID: "866b6c6a-2bca-40e3-a7fc-415ce419d9ce"). InnerVolumeSpecName "kube-api-access-tgzcw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:44:23 crc kubenswrapper[5070]: I1213 03:44:23.407662 5070 scope.go:117] "RemoveContainer" containerID="7b48e28ff716e77b35a05fae7aa10aa39991547e6064cbbc17cde54fc1b49590" Dec 13 03:44:23 crc kubenswrapper[5070]: I1213 03:44:23.452358 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/866b6c6a-2bca-40e3-a7fc-415ce419d9ce-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "866b6c6a-2bca-40e3-a7fc-415ce419d9ce" (UID: "866b6c6a-2bca-40e3-a7fc-415ce419d9ce"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 03:44:23 crc kubenswrapper[5070]: I1213 03:44:23.467341 5070 scope.go:117] "RemoveContainer" containerID="60ceecb5c2ae07b1e72ba34120abe7f99fec6e5899078673b684b41aae75ae47" Dec 13 03:44:23 crc kubenswrapper[5070]: E1213 03:44:23.467785 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"60ceecb5c2ae07b1e72ba34120abe7f99fec6e5899078673b684b41aae75ae47\": container with ID starting with 60ceecb5c2ae07b1e72ba34120abe7f99fec6e5899078673b684b41aae75ae47 not found: ID does not exist" containerID="60ceecb5c2ae07b1e72ba34120abe7f99fec6e5899078673b684b41aae75ae47" Dec 13 03:44:23 crc kubenswrapper[5070]: I1213 03:44:23.467827 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"60ceecb5c2ae07b1e72ba34120abe7f99fec6e5899078673b684b41aae75ae47"} err="failed to get container status \"60ceecb5c2ae07b1e72ba34120abe7f99fec6e5899078673b684b41aae75ae47\": rpc error: code = NotFound desc = could not find container \"60ceecb5c2ae07b1e72ba34120abe7f99fec6e5899078673b684b41aae75ae47\": container with ID starting with 60ceecb5c2ae07b1e72ba34120abe7f99fec6e5899078673b684b41aae75ae47 not found: ID does not exist" Dec 13 03:44:23 crc kubenswrapper[5070]: I1213 03:44:23.467853 5070 scope.go:117] "RemoveContainer" containerID="3481d1a0e19811c79262a9b59dfc1bba1b003ce5650f9de32c6b6f85b5863ceb" Dec 13 03:44:23 crc kubenswrapper[5070]: E1213 03:44:23.468076 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3481d1a0e19811c79262a9b59dfc1bba1b003ce5650f9de32c6b6f85b5863ceb\": container with ID starting with 3481d1a0e19811c79262a9b59dfc1bba1b003ce5650f9de32c6b6f85b5863ceb not found: ID does not exist" containerID="3481d1a0e19811c79262a9b59dfc1bba1b003ce5650f9de32c6b6f85b5863ceb" Dec 13 03:44:23 crc kubenswrapper[5070]: I1213 03:44:23.468095 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3481d1a0e19811c79262a9b59dfc1bba1b003ce5650f9de32c6b6f85b5863ceb"} err="failed to get container status \"3481d1a0e19811c79262a9b59dfc1bba1b003ce5650f9de32c6b6f85b5863ceb\": rpc error: code = NotFound desc = could not find container \"3481d1a0e19811c79262a9b59dfc1bba1b003ce5650f9de32c6b6f85b5863ceb\": container with ID starting with 
3481d1a0e19811c79262a9b59dfc1bba1b003ce5650f9de32c6b6f85b5863ceb not found: ID does not exist" Dec 13 03:44:23 crc kubenswrapper[5070]: I1213 03:44:23.468108 5070 scope.go:117] "RemoveContainer" containerID="7b48e28ff716e77b35a05fae7aa10aa39991547e6064cbbc17cde54fc1b49590" Dec 13 03:44:23 crc kubenswrapper[5070]: E1213 03:44:23.468441 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7b48e28ff716e77b35a05fae7aa10aa39991547e6064cbbc17cde54fc1b49590\": container with ID starting with 7b48e28ff716e77b35a05fae7aa10aa39991547e6064cbbc17cde54fc1b49590 not found: ID does not exist" containerID="7b48e28ff716e77b35a05fae7aa10aa39991547e6064cbbc17cde54fc1b49590" Dec 13 03:44:23 crc kubenswrapper[5070]: I1213 03:44:23.468483 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7b48e28ff716e77b35a05fae7aa10aa39991547e6064cbbc17cde54fc1b49590"} err="failed to get container status \"7b48e28ff716e77b35a05fae7aa10aa39991547e6064cbbc17cde54fc1b49590\": rpc error: code = NotFound desc = could not find container \"7b48e28ff716e77b35a05fae7aa10aa39991547e6064cbbc17cde54fc1b49590\": container with ID starting with 7b48e28ff716e77b35a05fae7aa10aa39991547e6064cbbc17cde54fc1b49590 not found: ID does not exist" Dec 13 03:44:23 crc kubenswrapper[5070]: I1213 03:44:23.498678 5070 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/866b6c6a-2bca-40e3-a7fc-415ce419d9ce-utilities\") on node \"crc\" DevicePath \"\"" Dec 13 03:44:23 crc kubenswrapper[5070]: I1213 03:44:23.499015 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tgzcw\" (UniqueName: \"kubernetes.io/projected/866b6c6a-2bca-40e3-a7fc-415ce419d9ce-kube-api-access-tgzcw\") on node \"crc\" DevicePath \"\"" Dec 13 03:44:23 crc kubenswrapper[5070]: I1213 03:44:23.499026 5070 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/866b6c6a-2bca-40e3-a7fc-415ce419d9ce-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 13 03:44:23 crc kubenswrapper[5070]: I1213 03:44:23.648933 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-mqq8x"] Dec 13 03:44:23 crc kubenswrapper[5070]: I1213 03:44:23.656572 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-mqq8x"] Dec 13 03:44:24 crc kubenswrapper[5070]: I1213 03:44:24.179442 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="866b6c6a-2bca-40e3-a7fc-415ce419d9ce" path="/var/lib/kubelet/pods/866b6c6a-2bca-40e3-a7fc-415ce419d9ce/volumes" Dec 13 03:44:38 crc kubenswrapper[5070]: I1213 03:44:38.172344 5070 scope.go:117] "RemoveContainer" containerID="868a59edbd4e377cf977ca832f6b9371f5846d986a6bbcacfbabf30c647e78a8" Dec 13 03:44:38 crc kubenswrapper[5070]: E1213 03:44:38.173125 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 03:44:54 crc kubenswrapper[5070]: I1213 03:44:54.172524 5070 scope.go:117] "RemoveContainer" 
containerID="868a59edbd4e377cf977ca832f6b9371f5846d986a6bbcacfbabf30c647e78a8" Dec 13 03:44:54 crc kubenswrapper[5070]: E1213 03:44:54.173195 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 03:45:00 crc kubenswrapper[5070]: I1213 03:45:00.150503 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29426625-7vnfh"] Dec 13 03:45:00 crc kubenswrapper[5070]: E1213 03:45:00.151281 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="55c77756-8958-4a70-9d22-964a13bdd82c" containerName="extract-utilities" Dec 13 03:45:00 crc kubenswrapper[5070]: I1213 03:45:00.151296 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="55c77756-8958-4a70-9d22-964a13bdd82c" containerName="extract-utilities" Dec 13 03:45:00 crc kubenswrapper[5070]: E1213 03:45:00.151317 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="866b6c6a-2bca-40e3-a7fc-415ce419d9ce" containerName="extract-utilities" Dec 13 03:45:00 crc kubenswrapper[5070]: I1213 03:45:00.151323 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="866b6c6a-2bca-40e3-a7fc-415ce419d9ce" containerName="extract-utilities" Dec 13 03:45:00 crc kubenswrapper[5070]: E1213 03:45:00.151330 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="866b6c6a-2bca-40e3-a7fc-415ce419d9ce" containerName="extract-content" Dec 13 03:45:00 crc kubenswrapper[5070]: I1213 03:45:00.151336 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="866b6c6a-2bca-40e3-a7fc-415ce419d9ce" containerName="extract-content" Dec 13 03:45:00 crc kubenswrapper[5070]: E1213 03:45:00.151359 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="55c77756-8958-4a70-9d22-964a13bdd82c" containerName="registry-server" Dec 13 03:45:00 crc kubenswrapper[5070]: I1213 03:45:00.151365 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="55c77756-8958-4a70-9d22-964a13bdd82c" containerName="registry-server" Dec 13 03:45:00 crc kubenswrapper[5070]: E1213 03:45:00.151380 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="55c77756-8958-4a70-9d22-964a13bdd82c" containerName="extract-content" Dec 13 03:45:00 crc kubenswrapper[5070]: I1213 03:45:00.151385 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="55c77756-8958-4a70-9d22-964a13bdd82c" containerName="extract-content" Dec 13 03:45:00 crc kubenswrapper[5070]: E1213 03:45:00.151397 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="866b6c6a-2bca-40e3-a7fc-415ce419d9ce" containerName="registry-server" Dec 13 03:45:00 crc kubenswrapper[5070]: I1213 03:45:00.151403 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="866b6c6a-2bca-40e3-a7fc-415ce419d9ce" containerName="registry-server" Dec 13 03:45:00 crc kubenswrapper[5070]: I1213 03:45:00.151687 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="55c77756-8958-4a70-9d22-964a13bdd82c" containerName="registry-server" Dec 13 03:45:00 crc kubenswrapper[5070]: I1213 03:45:00.151704 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="866b6c6a-2bca-40e3-a7fc-415ce419d9ce" 
containerName="registry-server" Dec 13 03:45:00 crc kubenswrapper[5070]: I1213 03:45:00.152322 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29426625-7vnfh" Dec 13 03:45:00 crc kubenswrapper[5070]: I1213 03:45:00.155749 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 13 03:45:00 crc kubenswrapper[5070]: I1213 03:45:00.156010 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 13 03:45:00 crc kubenswrapper[5070]: I1213 03:45:00.180232 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29426625-7vnfh"] Dec 13 03:45:00 crc kubenswrapper[5070]: I1213 03:45:00.324939 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/abeebb11-0cb3-4bd7-8817-7a6495a2fe47-config-volume\") pod \"collect-profiles-29426625-7vnfh\" (UID: \"abeebb11-0cb3-4bd7-8817-7a6495a2fe47\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426625-7vnfh" Dec 13 03:45:00 crc kubenswrapper[5070]: I1213 03:45:00.325395 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m8w8r\" (UniqueName: \"kubernetes.io/projected/abeebb11-0cb3-4bd7-8817-7a6495a2fe47-kube-api-access-m8w8r\") pod \"collect-profiles-29426625-7vnfh\" (UID: \"abeebb11-0cb3-4bd7-8817-7a6495a2fe47\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426625-7vnfh" Dec 13 03:45:00 crc kubenswrapper[5070]: I1213 03:45:00.325656 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/abeebb11-0cb3-4bd7-8817-7a6495a2fe47-secret-volume\") pod \"collect-profiles-29426625-7vnfh\" (UID: \"abeebb11-0cb3-4bd7-8817-7a6495a2fe47\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426625-7vnfh" Dec 13 03:45:00 crc kubenswrapper[5070]: I1213 03:45:00.428327 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/abeebb11-0cb3-4bd7-8817-7a6495a2fe47-config-volume\") pod \"collect-profiles-29426625-7vnfh\" (UID: \"abeebb11-0cb3-4bd7-8817-7a6495a2fe47\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426625-7vnfh" Dec 13 03:45:00 crc kubenswrapper[5070]: I1213 03:45:00.428382 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m8w8r\" (UniqueName: \"kubernetes.io/projected/abeebb11-0cb3-4bd7-8817-7a6495a2fe47-kube-api-access-m8w8r\") pod \"collect-profiles-29426625-7vnfh\" (UID: \"abeebb11-0cb3-4bd7-8817-7a6495a2fe47\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426625-7vnfh" Dec 13 03:45:00 crc kubenswrapper[5070]: I1213 03:45:00.428407 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/abeebb11-0cb3-4bd7-8817-7a6495a2fe47-secret-volume\") pod \"collect-profiles-29426625-7vnfh\" (UID: \"abeebb11-0cb3-4bd7-8817-7a6495a2fe47\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426625-7vnfh" Dec 13 03:45:00 crc kubenswrapper[5070]: I1213 03:45:00.429772 5070 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/abeebb11-0cb3-4bd7-8817-7a6495a2fe47-config-volume\") pod \"collect-profiles-29426625-7vnfh\" (UID: \"abeebb11-0cb3-4bd7-8817-7a6495a2fe47\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426625-7vnfh" Dec 13 03:45:00 crc kubenswrapper[5070]: I1213 03:45:00.437105 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/abeebb11-0cb3-4bd7-8817-7a6495a2fe47-secret-volume\") pod \"collect-profiles-29426625-7vnfh\" (UID: \"abeebb11-0cb3-4bd7-8817-7a6495a2fe47\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426625-7vnfh" Dec 13 03:45:00 crc kubenswrapper[5070]: I1213 03:45:00.449143 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m8w8r\" (UniqueName: \"kubernetes.io/projected/abeebb11-0cb3-4bd7-8817-7a6495a2fe47-kube-api-access-m8w8r\") pod \"collect-profiles-29426625-7vnfh\" (UID: \"abeebb11-0cb3-4bd7-8817-7a6495a2fe47\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426625-7vnfh" Dec 13 03:45:00 crc kubenswrapper[5070]: I1213 03:45:00.473430 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29426625-7vnfh" Dec 13 03:45:00 crc kubenswrapper[5070]: I1213 03:45:00.931976 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29426625-7vnfh"] Dec 13 03:45:01 crc kubenswrapper[5070]: I1213 03:45:01.708725 5070 generic.go:334] "Generic (PLEG): container finished" podID="abeebb11-0cb3-4bd7-8817-7a6495a2fe47" containerID="6d84daa83a308bbbb87c67ab26ee4b5ed48d91a2e01d2aaf213120d7f2066e9a" exitCode=0 Dec 13 03:45:01 crc kubenswrapper[5070]: I1213 03:45:01.708833 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29426625-7vnfh" event={"ID":"abeebb11-0cb3-4bd7-8817-7a6495a2fe47","Type":"ContainerDied","Data":"6d84daa83a308bbbb87c67ab26ee4b5ed48d91a2e01d2aaf213120d7f2066e9a"} Dec 13 03:45:01 crc kubenswrapper[5070]: I1213 03:45:01.709201 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29426625-7vnfh" event={"ID":"abeebb11-0cb3-4bd7-8817-7a6495a2fe47","Type":"ContainerStarted","Data":"9ecb5898a9389903f1c39afcc1e0d70f7d1accd155db0b5797c6366945561ecc"} Dec 13 03:45:03 crc kubenswrapper[5070]: I1213 03:45:03.042002 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29426625-7vnfh" Dec 13 03:45:03 crc kubenswrapper[5070]: I1213 03:45:03.197285 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/abeebb11-0cb3-4bd7-8817-7a6495a2fe47-config-volume\") pod \"abeebb11-0cb3-4bd7-8817-7a6495a2fe47\" (UID: \"abeebb11-0cb3-4bd7-8817-7a6495a2fe47\") " Dec 13 03:45:03 crc kubenswrapper[5070]: I1213 03:45:03.197962 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/abeebb11-0cb3-4bd7-8817-7a6495a2fe47-config-volume" (OuterVolumeSpecName: "config-volume") pod "abeebb11-0cb3-4bd7-8817-7a6495a2fe47" (UID: "abeebb11-0cb3-4bd7-8817-7a6495a2fe47"). InnerVolumeSpecName "config-volume". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:45:03 crc kubenswrapper[5070]: I1213 03:45:03.198273 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/abeebb11-0cb3-4bd7-8817-7a6495a2fe47-secret-volume\") pod \"abeebb11-0cb3-4bd7-8817-7a6495a2fe47\" (UID: \"abeebb11-0cb3-4bd7-8817-7a6495a2fe47\") " Dec 13 03:45:03 crc kubenswrapper[5070]: I1213 03:45:03.198332 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m8w8r\" (UniqueName: \"kubernetes.io/projected/abeebb11-0cb3-4bd7-8817-7a6495a2fe47-kube-api-access-m8w8r\") pod \"abeebb11-0cb3-4bd7-8817-7a6495a2fe47\" (UID: \"abeebb11-0cb3-4bd7-8817-7a6495a2fe47\") " Dec 13 03:45:03 crc kubenswrapper[5070]: I1213 03:45:03.198824 5070 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/abeebb11-0cb3-4bd7-8817-7a6495a2fe47-config-volume\") on node \"crc\" DevicePath \"\"" Dec 13 03:45:03 crc kubenswrapper[5070]: I1213 03:45:03.203623 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/abeebb11-0cb3-4bd7-8817-7a6495a2fe47-kube-api-access-m8w8r" (OuterVolumeSpecName: "kube-api-access-m8w8r") pod "abeebb11-0cb3-4bd7-8817-7a6495a2fe47" (UID: "abeebb11-0cb3-4bd7-8817-7a6495a2fe47"). InnerVolumeSpecName "kube-api-access-m8w8r". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:45:03 crc kubenswrapper[5070]: I1213 03:45:03.203995 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/abeebb11-0cb3-4bd7-8817-7a6495a2fe47-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "abeebb11-0cb3-4bd7-8817-7a6495a2fe47" (UID: "abeebb11-0cb3-4bd7-8817-7a6495a2fe47"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:45:03 crc kubenswrapper[5070]: I1213 03:45:03.301286 5070 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/abeebb11-0cb3-4bd7-8817-7a6495a2fe47-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 13 03:45:03 crc kubenswrapper[5070]: I1213 03:45:03.301321 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m8w8r\" (UniqueName: \"kubernetes.io/projected/abeebb11-0cb3-4bd7-8817-7a6495a2fe47-kube-api-access-m8w8r\") on node \"crc\" DevicePath \"\"" Dec 13 03:45:03 crc kubenswrapper[5070]: I1213 03:45:03.736094 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29426625-7vnfh" event={"ID":"abeebb11-0cb3-4bd7-8817-7a6495a2fe47","Type":"ContainerDied","Data":"9ecb5898a9389903f1c39afcc1e0d70f7d1accd155db0b5797c6366945561ecc"} Dec 13 03:45:03 crc kubenswrapper[5070]: I1213 03:45:03.736141 5070 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9ecb5898a9389903f1c39afcc1e0d70f7d1accd155db0b5797c6366945561ecc" Dec 13 03:45:03 crc kubenswrapper[5070]: I1213 03:45:03.736201 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29426625-7vnfh" Dec 13 03:45:04 crc kubenswrapper[5070]: I1213 03:45:04.126099 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29426580-zzsjs"] Dec 13 03:45:04 crc kubenswrapper[5070]: I1213 03:45:04.135084 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29426580-zzsjs"] Dec 13 03:45:04 crc kubenswrapper[5070]: I1213 03:45:04.179936 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="11d847a0-fe18-4f4a-8d65-b0f64b643c68" path="/var/lib/kubelet/pods/11d847a0-fe18-4f4a-8d65-b0f64b643c68/volumes" Dec 13 03:45:09 crc kubenswrapper[5070]: I1213 03:45:09.166731 5070 scope.go:117] "RemoveContainer" containerID="868a59edbd4e377cf977ca832f6b9371f5846d986a6bbcacfbabf30c647e78a8" Dec 13 03:45:09 crc kubenswrapper[5070]: E1213 03:45:09.167532 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 03:45:22 crc kubenswrapper[5070]: I1213 03:45:22.167290 5070 scope.go:117] "RemoveContainer" containerID="868a59edbd4e377cf977ca832f6b9371f5846d986a6bbcacfbabf30c647e78a8" Dec 13 03:45:22 crc kubenswrapper[5070]: I1213 03:45:22.918198 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" event={"ID":"a2e447c7-5901-414f-af96-69441d4750db","Type":"ContainerStarted","Data":"780d0740a03317cdcccec08fe9d993d16c77a58a86849080dfe009ba45672bf2"} Dec 13 03:45:25 crc kubenswrapper[5070]: I1213 03:45:25.404985 5070 scope.go:117] "RemoveContainer" containerID="6b159dad678009b8c726600e93ff8fb8b01cd53bd0732a088e523eb2d241b277" Dec 13 03:46:44 crc kubenswrapper[5070]: I1213 03:46:44.027399 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-z8z45"] Dec 13 03:46:44 crc kubenswrapper[5070]: I1213 03:46:44.035464 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-2g9ds"] Dec 13 03:46:44 crc kubenswrapper[5070]: I1213 03:46:44.046211 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-96kcn"] Dec 13 03:46:44 crc kubenswrapper[5070]: I1213 03:46:44.055033 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-p99j4"] Dec 13 03:46:44 crc kubenswrapper[5070]: I1213 03:46:44.062815 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-t7hrk"] Dec 13 03:46:44 crc kubenswrapper[5070]: I1213 03:46:44.069971 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-z8z45"] Dec 13 03:46:44 crc kubenswrapper[5070]: I1213 03:46:44.076633 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-2g9ds"] Dec 13 03:46:44 crc kubenswrapper[5070]: I1213 03:46:44.082960 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-gwf6t"] Dec 13 03:46:44 crc kubenswrapper[5070]: I1213 03:46:44.088861 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-96kcn"] Dec 13 03:46:44 crc kubenswrapper[5070]: I1213 03:46:44.094797 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-t7hrk"] Dec 13 03:46:44 crc kubenswrapper[5070]: I1213 03:46:44.101135 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-p99j4"] Dec 13 03:46:44 crc kubenswrapper[5070]: I1213 03:46:44.108176 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-6xnt9"] Dec 13 03:46:44 crc kubenswrapper[5070]: I1213 03:46:44.114964 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-6xnt9"] Dec 13 03:46:44 crc kubenswrapper[5070]: I1213 03:46:44.122322 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-2lj4m"] Dec 13 03:46:44 crc kubenswrapper[5070]: I1213 03:46:44.129329 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-f8v8z"] Dec 13 03:46:44 crc kubenswrapper[5070]: I1213 03:46:44.135655 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-jpr2m"] Dec 13 03:46:44 crc kubenswrapper[5070]: I1213 03:46:44.141598 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-gwf6t"] Dec 13 03:46:44 crc kubenswrapper[5070]: I1213 03:46:44.147572 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-2lj4m"] Dec 13 03:46:44 crc kubenswrapper[5070]: I1213 03:46:44.164956 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-f8v8z"] Dec 13 03:46:44 crc kubenswrapper[5070]: I1213 03:46:44.186200 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="06c38db1-1673-4e3f-b4b7-50277a407e82" path="/var/lib/kubelet/pods/06c38db1-1673-4e3f-b4b7-50277a407e82/volumes" Dec 13 03:46:44 crc kubenswrapper[5070]: I1213 03:46:44.187172 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="201e6f0e-784e-4526-bcbd-3c6ca1d592b6" path="/var/lib/kubelet/pods/201e6f0e-784e-4526-bcbd-3c6ca1d592b6/volumes" Dec 13 03:46:44 crc kubenswrapper[5070]: I1213 03:46:44.188389 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="639f210d-0488-4906-9a1e-00fdc9e1c77f" path="/var/lib/kubelet/pods/639f210d-0488-4906-9a1e-00fdc9e1c77f/volumes" Dec 13 03:46:44 crc kubenswrapper[5070]: I1213 03:46:44.189185 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="72639ee2-e2ea-4ca5-ad91-5998c4780ba0" path="/var/lib/kubelet/pods/72639ee2-e2ea-4ca5-ad91-5998c4780ba0/volumes" Dec 13 03:46:44 crc kubenswrapper[5070]: I1213 03:46:44.190886 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="73eb5f6f-c53f-40a7-950a-901fadf89183" path="/var/lib/kubelet/pods/73eb5f6f-c53f-40a7-950a-901fadf89183/volumes" Dec 13 03:46:44 crc kubenswrapper[5070]: I1213 03:46:44.191833 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="a500beac-00af-444d-8ca8-52c3c7c0af60" path="/var/lib/kubelet/pods/a500beac-00af-444d-8ca8-52c3c7c0af60/volumes" Dec 13 03:46:44 crc kubenswrapper[5070]: I1213 03:46:44.192718 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ab3ac857-35ce-4137-929a-5f1162c4ca8a" path="/var/lib/kubelet/pods/ab3ac857-35ce-4137-929a-5f1162c4ca8a/volumes" Dec 13 03:46:44 crc kubenswrapper[5070]: I1213 03:46:44.193788 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f3044217-1b50-4e01-8b17-cdb64d3a850e" path="/var/lib/kubelet/pods/f3044217-1b50-4e01-8b17-cdb64d3a850e/volumes" Dec 13 03:46:44 crc kubenswrapper[5070]: I1213 03:46:44.194435 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ff5ad124-4227-43d1-ac79-458743c916f1" path="/var/lib/kubelet/pods/ff5ad124-4227-43d1-ac79-458743c916f1/volumes" Dec 13 03:46:44 crc kubenswrapper[5070]: I1213 03:46:44.195077 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-jpr2m"] Dec 13 03:46:46 crc kubenswrapper[5070]: I1213 03:46:46.179092 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2f82187b-9034-41c1-b08c-a18a14623fdb" path="/var/lib/kubelet/pods/2f82187b-9034-41c1-b08c-a18a14623fdb/volumes" Dec 13 03:46:50 crc kubenswrapper[5070]: I1213 03:46:50.165099 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wd2rj"] Dec 13 03:46:50 crc kubenswrapper[5070]: E1213 03:46:50.165897 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="abeebb11-0cb3-4bd7-8817-7a6495a2fe47" containerName="collect-profiles" Dec 13 03:46:50 crc kubenswrapper[5070]: I1213 03:46:50.165909 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="abeebb11-0cb3-4bd7-8817-7a6495a2fe47" containerName="collect-profiles" Dec 13 03:46:50 crc kubenswrapper[5070]: I1213 03:46:50.166092 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="abeebb11-0cb3-4bd7-8817-7a6495a2fe47" containerName="collect-profiles" Dec 13 03:46:50 crc kubenswrapper[5070]: I1213 03:46:50.166882 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wd2rj" Dec 13 03:46:50 crc kubenswrapper[5070]: I1213 03:46:50.169347 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Dec 13 03:46:50 crc kubenswrapper[5070]: I1213 03:46:50.169655 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-gdkcc" Dec 13 03:46:50 crc kubenswrapper[5070]: I1213 03:46:50.174130 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 13 03:46:50 crc kubenswrapper[5070]: I1213 03:46:50.174573 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 13 03:46:50 crc kubenswrapper[5070]: I1213 03:46:50.174629 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 13 03:46:50 crc kubenswrapper[5070]: I1213 03:46:50.190125 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wd2rj"] Dec 13 03:46:50 crc kubenswrapper[5070]: I1213 03:46:50.291459 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/cd87f8d8-10c1-4341-b414-c234b474b4e2-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-wd2rj\" (UID: \"cd87f8d8-10c1-4341-b414-c234b474b4e2\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wd2rj" Dec 13 03:46:50 crc kubenswrapper[5070]: I1213 03:46:50.291504 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g7bqd\" (UniqueName: \"kubernetes.io/projected/cd87f8d8-10c1-4341-b414-c234b474b4e2-kube-api-access-g7bqd\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-wd2rj\" (UID: \"cd87f8d8-10c1-4341-b414-c234b474b4e2\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wd2rj" Dec 13 03:46:50 crc kubenswrapper[5070]: I1213 03:46:50.291539 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd87f8d8-10c1-4341-b414-c234b474b4e2-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-wd2rj\" (UID: \"cd87f8d8-10c1-4341-b414-c234b474b4e2\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wd2rj" Dec 13 03:46:50 crc kubenswrapper[5070]: I1213 03:46:50.292768 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/cd87f8d8-10c1-4341-b414-c234b474b4e2-ceph\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-wd2rj\" (UID: \"cd87f8d8-10c1-4341-b414-c234b474b4e2\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wd2rj" Dec 13 03:46:50 crc kubenswrapper[5070]: I1213 03:46:50.292939 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/cd87f8d8-10c1-4341-b414-c234b474b4e2-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-wd2rj\" (UID: \"cd87f8d8-10c1-4341-b414-c234b474b4e2\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wd2rj" Dec 13 03:46:50 crc kubenswrapper[5070]: I1213 03:46:50.394976 5070 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd87f8d8-10c1-4341-b414-c234b474b4e2-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-wd2rj\" (UID: \"cd87f8d8-10c1-4341-b414-c234b474b4e2\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wd2rj" Dec 13 03:46:50 crc kubenswrapper[5070]: I1213 03:46:50.395241 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/cd87f8d8-10c1-4341-b414-c234b474b4e2-ceph\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-wd2rj\" (UID: \"cd87f8d8-10c1-4341-b414-c234b474b4e2\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wd2rj" Dec 13 03:46:50 crc kubenswrapper[5070]: I1213 03:46:50.395305 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/cd87f8d8-10c1-4341-b414-c234b474b4e2-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-wd2rj\" (UID: \"cd87f8d8-10c1-4341-b414-c234b474b4e2\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wd2rj" Dec 13 03:46:50 crc kubenswrapper[5070]: I1213 03:46:50.395360 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/cd87f8d8-10c1-4341-b414-c234b474b4e2-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-wd2rj\" (UID: \"cd87f8d8-10c1-4341-b414-c234b474b4e2\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wd2rj" Dec 13 03:46:50 crc kubenswrapper[5070]: I1213 03:46:50.395379 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g7bqd\" (UniqueName: \"kubernetes.io/projected/cd87f8d8-10c1-4341-b414-c234b474b4e2-kube-api-access-g7bqd\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-wd2rj\" (UID: \"cd87f8d8-10c1-4341-b414-c234b474b4e2\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wd2rj" Dec 13 03:46:50 crc kubenswrapper[5070]: I1213 03:46:50.402389 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd87f8d8-10c1-4341-b414-c234b474b4e2-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-wd2rj\" (UID: \"cd87f8d8-10c1-4341-b414-c234b474b4e2\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wd2rj" Dec 13 03:46:50 crc kubenswrapper[5070]: I1213 03:46:50.404295 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/cd87f8d8-10c1-4341-b414-c234b474b4e2-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-wd2rj\" (UID: \"cd87f8d8-10c1-4341-b414-c234b474b4e2\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wd2rj" Dec 13 03:46:50 crc kubenswrapper[5070]: I1213 03:46:50.407108 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/cd87f8d8-10c1-4341-b414-c234b474b4e2-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-wd2rj\" (UID: \"cd87f8d8-10c1-4341-b414-c234b474b4e2\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wd2rj" Dec 13 03:46:50 crc kubenswrapper[5070]: I1213 03:46:50.407959 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: 
\"kubernetes.io/secret/cd87f8d8-10c1-4341-b414-c234b474b4e2-ceph\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-wd2rj\" (UID: \"cd87f8d8-10c1-4341-b414-c234b474b4e2\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wd2rj" Dec 13 03:46:50 crc kubenswrapper[5070]: I1213 03:46:50.412018 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g7bqd\" (UniqueName: \"kubernetes.io/projected/cd87f8d8-10c1-4341-b414-c234b474b4e2-kube-api-access-g7bqd\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-wd2rj\" (UID: \"cd87f8d8-10c1-4341-b414-c234b474b4e2\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wd2rj" Dec 13 03:46:50 crc kubenswrapper[5070]: I1213 03:46:50.485585 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wd2rj" Dec 13 03:46:51 crc kubenswrapper[5070]: I1213 03:46:51.009682 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wd2rj"] Dec 13 03:46:51 crc kubenswrapper[5070]: I1213 03:46:51.676119 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wd2rj" event={"ID":"cd87f8d8-10c1-4341-b414-c234b474b4e2","Type":"ContainerStarted","Data":"15386550576c29c04e4736fe4a192b2bd943babc880f5abf3fc8f208ce117cef"} Dec 13 03:46:52 crc kubenswrapper[5070]: I1213 03:46:52.687878 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wd2rj" event={"ID":"cd87f8d8-10c1-4341-b414-c234b474b4e2","Type":"ContainerStarted","Data":"41c4c967643cda6c8fcecdbe6969fcf3e8d538576766f8bf466656fafb414e64"} Dec 13 03:46:52 crc kubenswrapper[5070]: I1213 03:46:52.703790 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wd2rj" podStartSLOduration=2.136357895 podStartE2EDuration="2.703773265s" podCreationTimestamp="2025-12-13 03:46:50 +0000 UTC" firstStartedPulling="2025-12-13 03:46:51.017454739 +0000 UTC m=+2103.253298285" lastFinishedPulling="2025-12-13 03:46:51.584870109 +0000 UTC m=+2103.820713655" observedRunningTime="2025-12-13 03:46:52.700872546 +0000 UTC m=+2104.936716092" watchObservedRunningTime="2025-12-13 03:46:52.703773265 +0000 UTC m=+2104.939616811" Dec 13 03:47:02 crc kubenswrapper[5070]: I1213 03:47:02.765559 5070 generic.go:334] "Generic (PLEG): container finished" podID="cd87f8d8-10c1-4341-b414-c234b474b4e2" containerID="41c4c967643cda6c8fcecdbe6969fcf3e8d538576766f8bf466656fafb414e64" exitCode=0 Dec 13 03:47:02 crc kubenswrapper[5070]: I1213 03:47:02.766064 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wd2rj" event={"ID":"cd87f8d8-10c1-4341-b414-c234b474b4e2","Type":"ContainerDied","Data":"41c4c967643cda6c8fcecdbe6969fcf3e8d538576766f8bf466656fafb414e64"} Dec 13 03:47:04 crc kubenswrapper[5070]: I1213 03:47:04.216649 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wd2rj" Dec 13 03:47:04 crc kubenswrapper[5070]: I1213 03:47:04.340773 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g7bqd\" (UniqueName: \"kubernetes.io/projected/cd87f8d8-10c1-4341-b414-c234b474b4e2-kube-api-access-g7bqd\") pod \"cd87f8d8-10c1-4341-b414-c234b474b4e2\" (UID: \"cd87f8d8-10c1-4341-b414-c234b474b4e2\") " Dec 13 03:47:04 crc kubenswrapper[5070]: I1213 03:47:04.340983 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/cd87f8d8-10c1-4341-b414-c234b474b4e2-inventory\") pod \"cd87f8d8-10c1-4341-b414-c234b474b4e2\" (UID: \"cd87f8d8-10c1-4341-b414-c234b474b4e2\") " Dec 13 03:47:04 crc kubenswrapper[5070]: I1213 03:47:04.341058 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd87f8d8-10c1-4341-b414-c234b474b4e2-repo-setup-combined-ca-bundle\") pod \"cd87f8d8-10c1-4341-b414-c234b474b4e2\" (UID: \"cd87f8d8-10c1-4341-b414-c234b474b4e2\") " Dec 13 03:47:04 crc kubenswrapper[5070]: I1213 03:47:04.341116 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/cd87f8d8-10c1-4341-b414-c234b474b4e2-ceph\") pod \"cd87f8d8-10c1-4341-b414-c234b474b4e2\" (UID: \"cd87f8d8-10c1-4341-b414-c234b474b4e2\") " Dec 13 03:47:04 crc kubenswrapper[5070]: I1213 03:47:04.341221 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/cd87f8d8-10c1-4341-b414-c234b474b4e2-ssh-key\") pod \"cd87f8d8-10c1-4341-b414-c234b474b4e2\" (UID: \"cd87f8d8-10c1-4341-b414-c234b474b4e2\") " Dec 13 03:47:04 crc kubenswrapper[5070]: I1213 03:47:04.346563 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cd87f8d8-10c1-4341-b414-c234b474b4e2-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "cd87f8d8-10c1-4341-b414-c234b474b4e2" (UID: "cd87f8d8-10c1-4341-b414-c234b474b4e2"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:47:04 crc kubenswrapper[5070]: I1213 03:47:04.349667 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd87f8d8-10c1-4341-b414-c234b474b4e2-kube-api-access-g7bqd" (OuterVolumeSpecName: "kube-api-access-g7bqd") pod "cd87f8d8-10c1-4341-b414-c234b474b4e2" (UID: "cd87f8d8-10c1-4341-b414-c234b474b4e2"). InnerVolumeSpecName "kube-api-access-g7bqd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:47:04 crc kubenswrapper[5070]: I1213 03:47:04.350602 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cd87f8d8-10c1-4341-b414-c234b474b4e2-ceph" (OuterVolumeSpecName: "ceph") pod "cd87f8d8-10c1-4341-b414-c234b474b4e2" (UID: "cd87f8d8-10c1-4341-b414-c234b474b4e2"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:47:04 crc kubenswrapper[5070]: I1213 03:47:04.366871 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cd87f8d8-10c1-4341-b414-c234b474b4e2-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "cd87f8d8-10c1-4341-b414-c234b474b4e2" (UID: "cd87f8d8-10c1-4341-b414-c234b474b4e2"). 
InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:47:04 crc kubenswrapper[5070]: I1213 03:47:04.368614 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cd87f8d8-10c1-4341-b414-c234b474b4e2-inventory" (OuterVolumeSpecName: "inventory") pod "cd87f8d8-10c1-4341-b414-c234b474b4e2" (UID: "cd87f8d8-10c1-4341-b414-c234b474b4e2"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:47:04 crc kubenswrapper[5070]: I1213 03:47:04.443205 5070 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/cd87f8d8-10c1-4341-b414-c234b474b4e2-inventory\") on node \"crc\" DevicePath \"\"" Dec 13 03:47:04 crc kubenswrapper[5070]: I1213 03:47:04.443244 5070 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd87f8d8-10c1-4341-b414-c234b474b4e2-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 03:47:04 crc kubenswrapper[5070]: I1213 03:47:04.443260 5070 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/cd87f8d8-10c1-4341-b414-c234b474b4e2-ceph\") on node \"crc\" DevicePath \"\"" Dec 13 03:47:04 crc kubenswrapper[5070]: I1213 03:47:04.443270 5070 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/cd87f8d8-10c1-4341-b414-c234b474b4e2-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 13 03:47:04 crc kubenswrapper[5070]: I1213 03:47:04.443281 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g7bqd\" (UniqueName: \"kubernetes.io/projected/cd87f8d8-10c1-4341-b414-c234b474b4e2-kube-api-access-g7bqd\") on node \"crc\" DevicePath \"\"" Dec 13 03:47:04 crc kubenswrapper[5070]: I1213 03:47:04.788901 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wd2rj" event={"ID":"cd87f8d8-10c1-4341-b414-c234b474b4e2","Type":"ContainerDied","Data":"15386550576c29c04e4736fe4a192b2bd943babc880f5abf3fc8f208ce117cef"} Dec 13 03:47:04 crc kubenswrapper[5070]: I1213 03:47:04.788941 5070 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="15386550576c29c04e4736fe4a192b2bd943babc880f5abf3fc8f208ce117cef" Dec 13 03:47:04 crc kubenswrapper[5070]: I1213 03:47:04.789026 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wd2rj" Dec 13 03:47:04 crc kubenswrapper[5070]: I1213 03:47:04.872434 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-cmh89"] Dec 13 03:47:04 crc kubenswrapper[5070]: E1213 03:47:04.872956 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cd87f8d8-10c1-4341-b414-c234b474b4e2" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Dec 13 03:47:04 crc kubenswrapper[5070]: I1213 03:47:04.872978 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="cd87f8d8-10c1-4341-b414-c234b474b4e2" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Dec 13 03:47:04 crc kubenswrapper[5070]: I1213 03:47:04.873191 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="cd87f8d8-10c1-4341-b414-c234b474b4e2" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Dec 13 03:47:04 crc kubenswrapper[5070]: I1213 03:47:04.873830 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-cmh89" Dec 13 03:47:04 crc kubenswrapper[5070]: I1213 03:47:04.876718 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 13 03:47:04 crc kubenswrapper[5070]: I1213 03:47:04.876908 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 13 03:47:04 crc kubenswrapper[5070]: I1213 03:47:04.877031 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-gdkcc" Dec 13 03:47:04 crc kubenswrapper[5070]: I1213 03:47:04.877396 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 13 03:47:04 crc kubenswrapper[5070]: I1213 03:47:04.877939 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Dec 13 03:47:04 crc kubenswrapper[5070]: I1213 03:47:04.886106 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-cmh89"] Dec 13 03:47:04 crc kubenswrapper[5070]: I1213 03:47:04.952617 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/7859b77b-28d9-4fb3-83c6-d38cda4ecba5-ceph\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-cmh89\" (UID: \"7859b77b-28d9-4fb3-83c6-d38cda4ecba5\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-cmh89" Dec 13 03:47:04 crc kubenswrapper[5070]: I1213 03:47:04.952848 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j4f4k\" (UniqueName: \"kubernetes.io/projected/7859b77b-28d9-4fb3-83c6-d38cda4ecba5-kube-api-access-j4f4k\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-cmh89\" (UID: \"7859b77b-28d9-4fb3-83c6-d38cda4ecba5\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-cmh89" Dec 13 03:47:04 crc kubenswrapper[5070]: I1213 03:47:04.952900 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7859b77b-28d9-4fb3-83c6-d38cda4ecba5-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-cmh89\" (UID: \"7859b77b-28d9-4fb3-83c6-d38cda4ecba5\") " 
pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-cmh89" Dec 13 03:47:04 crc kubenswrapper[5070]: I1213 03:47:04.952942 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7859b77b-28d9-4fb3-83c6-d38cda4ecba5-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-cmh89\" (UID: \"7859b77b-28d9-4fb3-83c6-d38cda4ecba5\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-cmh89" Dec 13 03:47:04 crc kubenswrapper[5070]: I1213 03:47:04.953074 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7859b77b-28d9-4fb3-83c6-d38cda4ecba5-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-cmh89\" (UID: \"7859b77b-28d9-4fb3-83c6-d38cda4ecba5\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-cmh89" Dec 13 03:47:05 crc kubenswrapper[5070]: I1213 03:47:05.054822 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j4f4k\" (UniqueName: \"kubernetes.io/projected/7859b77b-28d9-4fb3-83c6-d38cda4ecba5-kube-api-access-j4f4k\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-cmh89\" (UID: \"7859b77b-28d9-4fb3-83c6-d38cda4ecba5\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-cmh89" Dec 13 03:47:05 crc kubenswrapper[5070]: I1213 03:47:05.054873 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7859b77b-28d9-4fb3-83c6-d38cda4ecba5-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-cmh89\" (UID: \"7859b77b-28d9-4fb3-83c6-d38cda4ecba5\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-cmh89" Dec 13 03:47:05 crc kubenswrapper[5070]: I1213 03:47:05.054901 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7859b77b-28d9-4fb3-83c6-d38cda4ecba5-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-cmh89\" (UID: \"7859b77b-28d9-4fb3-83c6-d38cda4ecba5\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-cmh89" Dec 13 03:47:05 crc kubenswrapper[5070]: I1213 03:47:05.054962 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7859b77b-28d9-4fb3-83c6-d38cda4ecba5-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-cmh89\" (UID: \"7859b77b-28d9-4fb3-83c6-d38cda4ecba5\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-cmh89" Dec 13 03:47:05 crc kubenswrapper[5070]: I1213 03:47:05.055010 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/7859b77b-28d9-4fb3-83c6-d38cda4ecba5-ceph\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-cmh89\" (UID: \"7859b77b-28d9-4fb3-83c6-d38cda4ecba5\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-cmh89" Dec 13 03:47:05 crc kubenswrapper[5070]: I1213 03:47:05.059671 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7859b77b-28d9-4fb3-83c6-d38cda4ecba5-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-cmh89\" (UID: \"7859b77b-28d9-4fb3-83c6-d38cda4ecba5\") " 
pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-cmh89" Dec 13 03:47:05 crc kubenswrapper[5070]: I1213 03:47:05.060219 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7859b77b-28d9-4fb3-83c6-d38cda4ecba5-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-cmh89\" (UID: \"7859b77b-28d9-4fb3-83c6-d38cda4ecba5\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-cmh89" Dec 13 03:47:05 crc kubenswrapper[5070]: I1213 03:47:05.060711 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/7859b77b-28d9-4fb3-83c6-d38cda4ecba5-ceph\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-cmh89\" (UID: \"7859b77b-28d9-4fb3-83c6-d38cda4ecba5\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-cmh89" Dec 13 03:47:05 crc kubenswrapper[5070]: I1213 03:47:05.063908 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7859b77b-28d9-4fb3-83c6-d38cda4ecba5-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-cmh89\" (UID: \"7859b77b-28d9-4fb3-83c6-d38cda4ecba5\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-cmh89" Dec 13 03:47:05 crc kubenswrapper[5070]: I1213 03:47:05.080928 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j4f4k\" (UniqueName: \"kubernetes.io/projected/7859b77b-28d9-4fb3-83c6-d38cda4ecba5-kube-api-access-j4f4k\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-cmh89\" (UID: \"7859b77b-28d9-4fb3-83c6-d38cda4ecba5\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-cmh89" Dec 13 03:47:05 crc kubenswrapper[5070]: I1213 03:47:05.201022 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-cmh89" Dec 13 03:47:05 crc kubenswrapper[5070]: I1213 03:47:05.710425 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-cmh89"] Dec 13 03:47:05 crc kubenswrapper[5070]: W1213 03:47:05.714003 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7859b77b_28d9_4fb3_83c6_d38cda4ecba5.slice/crio-1dd2f1197b9218210f772ce1ef135c13a14f9ae7e54faf27df7eb29a33d77d05 WatchSource:0}: Error finding container 1dd2f1197b9218210f772ce1ef135c13a14f9ae7e54faf27df7eb29a33d77d05: Status 404 returned error can't find the container with id 1dd2f1197b9218210f772ce1ef135c13a14f9ae7e54faf27df7eb29a33d77d05 Dec 13 03:47:05 crc kubenswrapper[5070]: I1213 03:47:05.800219 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-cmh89" event={"ID":"7859b77b-28d9-4fb3-83c6-d38cda4ecba5","Type":"ContainerStarted","Data":"1dd2f1197b9218210f772ce1ef135c13a14f9ae7e54faf27df7eb29a33d77d05"} Dec 13 03:47:06 crc kubenswrapper[5070]: I1213 03:47:06.814133 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-cmh89" event={"ID":"7859b77b-28d9-4fb3-83c6-d38cda4ecba5","Type":"ContainerStarted","Data":"e7122c909919bb7aaf7250ee178e8a1091df76a00cc19bbe9ca40fbd83788500"} Dec 13 03:47:06 crc kubenswrapper[5070]: I1213 03:47:06.840794 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-cmh89" podStartSLOduration=2.324050198 podStartE2EDuration="2.840774433s" podCreationTimestamp="2025-12-13 03:47:04 +0000 UTC" firstStartedPulling="2025-12-13 03:47:05.716570513 +0000 UTC m=+2117.952414059" lastFinishedPulling="2025-12-13 03:47:06.233294728 +0000 UTC m=+2118.469138294" observedRunningTime="2025-12-13 03:47:06.835207551 +0000 UTC m=+2119.071051107" watchObservedRunningTime="2025-12-13 03:47:06.840774433 +0000 UTC m=+2119.076617979" Dec 13 03:47:25 crc kubenswrapper[5070]: I1213 03:47:25.489312 5070 scope.go:117] "RemoveContainer" containerID="7b041109717aec09fcaf235f689783e0441fde6859e1394d741bd4952e5d7436" Dec 13 03:47:25 crc kubenswrapper[5070]: I1213 03:47:25.522280 5070 scope.go:117] "RemoveContainer" containerID="bf5459e3cf5fb32afbb32b09315608abc089b4d1911d45685ec44fa39b82af15" Dec 13 03:47:25 crc kubenswrapper[5070]: I1213 03:47:25.599480 5070 scope.go:117] "RemoveContainer" containerID="e24ab748bc62e54fc73dfdb116c3786c37bce75dab0cd26250000d68d0ade04e" Dec 13 03:47:25 crc kubenswrapper[5070]: I1213 03:47:25.674950 5070 scope.go:117] "RemoveContainer" containerID="56ed47b0d0a6b465d2bcc04f090abcaf04ff5b7cead3fd6691b56ee0e6822908" Dec 13 03:47:25 crc kubenswrapper[5070]: I1213 03:47:25.707927 5070 scope.go:117] "RemoveContainer" containerID="8e505250cf00c718e1776e33659e3e72de6e4d29457840c373af295e0ad5cf7f" Dec 13 03:47:25 crc kubenswrapper[5070]: I1213 03:47:25.789642 5070 scope.go:117] "RemoveContainer" containerID="c3f78ea96eb0e9d49c006258ffc53c0220c3c1ac315c01aca660a84df8235b2e" Dec 13 03:47:25 crc kubenswrapper[5070]: I1213 03:47:25.846818 5070 scope.go:117] "RemoveContainer" containerID="add073fb630bf030820b5fd5f2a9f8959cd8acf33a1076c1ee38e8884709b36f" Dec 13 03:47:25 crc kubenswrapper[5070]: I1213 03:47:25.943601 5070 scope.go:117] "RemoveContainer" 
containerID="9ff69f5be9484c05bc7111b01625c104c2763c33f7fad88bee56816b490fcfe4" Dec 13 03:47:25 crc kubenswrapper[5070]: I1213 03:47:25.981786 5070 scope.go:117] "RemoveContainer" containerID="6edad932fa3ff3c2ef609119b2b212d2ba7afbd2ab1fbb62dd76f3c965d2e43b" Dec 13 03:47:51 crc kubenswrapper[5070]: I1213 03:47:51.942695 5070 patch_prober.go:28] interesting pod/machine-config-daemon-9l4rb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 13 03:47:51 crc kubenswrapper[5070]: I1213 03:47:51.943393 5070 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 13 03:48:21 crc kubenswrapper[5070]: I1213 03:48:21.942984 5070 patch_prober.go:28] interesting pod/machine-config-daemon-9l4rb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 13 03:48:21 crc kubenswrapper[5070]: I1213 03:48:21.943875 5070 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 13 03:48:26 crc kubenswrapper[5070]: I1213 03:48:26.160966 5070 scope.go:117] "RemoveContainer" containerID="70b3dde64458f0a590eebc57cf29df0d3f41e03d2f5f4b406630c9487dd866b2" Dec 13 03:48:42 crc kubenswrapper[5070]: I1213 03:48:42.719435 5070 generic.go:334] "Generic (PLEG): container finished" podID="7859b77b-28d9-4fb3-83c6-d38cda4ecba5" containerID="e7122c909919bb7aaf7250ee178e8a1091df76a00cc19bbe9ca40fbd83788500" exitCode=0 Dec 13 03:48:42 crc kubenswrapper[5070]: I1213 03:48:42.719537 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-cmh89" event={"ID":"7859b77b-28d9-4fb3-83c6-d38cda4ecba5","Type":"ContainerDied","Data":"e7122c909919bb7aaf7250ee178e8a1091df76a00cc19bbe9ca40fbd83788500"} Dec 13 03:48:44 crc kubenswrapper[5070]: I1213 03:48:44.133096 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-cmh89" Dec 13 03:48:44 crc kubenswrapper[5070]: I1213 03:48:44.233048 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7859b77b-28d9-4fb3-83c6-d38cda4ecba5-inventory\") pod \"7859b77b-28d9-4fb3-83c6-d38cda4ecba5\" (UID: \"7859b77b-28d9-4fb3-83c6-d38cda4ecba5\") " Dec 13 03:48:44 crc kubenswrapper[5070]: I1213 03:48:44.233975 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/7859b77b-28d9-4fb3-83c6-d38cda4ecba5-ceph\") pod \"7859b77b-28d9-4fb3-83c6-d38cda4ecba5\" (UID: \"7859b77b-28d9-4fb3-83c6-d38cda4ecba5\") " Dec 13 03:48:44 crc kubenswrapper[5070]: I1213 03:48:44.234222 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7859b77b-28d9-4fb3-83c6-d38cda4ecba5-ssh-key\") pod \"7859b77b-28d9-4fb3-83c6-d38cda4ecba5\" (UID: \"7859b77b-28d9-4fb3-83c6-d38cda4ecba5\") " Dec 13 03:48:44 crc kubenswrapper[5070]: I1213 03:48:44.234544 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j4f4k\" (UniqueName: \"kubernetes.io/projected/7859b77b-28d9-4fb3-83c6-d38cda4ecba5-kube-api-access-j4f4k\") pod \"7859b77b-28d9-4fb3-83c6-d38cda4ecba5\" (UID: \"7859b77b-28d9-4fb3-83c6-d38cda4ecba5\") " Dec 13 03:48:44 crc kubenswrapper[5070]: I1213 03:48:44.234649 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7859b77b-28d9-4fb3-83c6-d38cda4ecba5-bootstrap-combined-ca-bundle\") pod \"7859b77b-28d9-4fb3-83c6-d38cda4ecba5\" (UID: \"7859b77b-28d9-4fb3-83c6-d38cda4ecba5\") " Dec 13 03:48:44 crc kubenswrapper[5070]: I1213 03:48:44.239947 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7859b77b-28d9-4fb3-83c6-d38cda4ecba5-ceph" (OuterVolumeSpecName: "ceph") pod "7859b77b-28d9-4fb3-83c6-d38cda4ecba5" (UID: "7859b77b-28d9-4fb3-83c6-d38cda4ecba5"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:48:44 crc kubenswrapper[5070]: I1213 03:48:44.240331 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7859b77b-28d9-4fb3-83c6-d38cda4ecba5-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "7859b77b-28d9-4fb3-83c6-d38cda4ecba5" (UID: "7859b77b-28d9-4fb3-83c6-d38cda4ecba5"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:48:44 crc kubenswrapper[5070]: I1213 03:48:44.240411 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7859b77b-28d9-4fb3-83c6-d38cda4ecba5-kube-api-access-j4f4k" (OuterVolumeSpecName: "kube-api-access-j4f4k") pod "7859b77b-28d9-4fb3-83c6-d38cda4ecba5" (UID: "7859b77b-28d9-4fb3-83c6-d38cda4ecba5"). InnerVolumeSpecName "kube-api-access-j4f4k". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:48:44 crc kubenswrapper[5070]: I1213 03:48:44.259862 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7859b77b-28d9-4fb3-83c6-d38cda4ecba5-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "7859b77b-28d9-4fb3-83c6-d38cda4ecba5" (UID: "7859b77b-28d9-4fb3-83c6-d38cda4ecba5"). 
InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:48:44 crc kubenswrapper[5070]: I1213 03:48:44.260171 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7859b77b-28d9-4fb3-83c6-d38cda4ecba5-inventory" (OuterVolumeSpecName: "inventory") pod "7859b77b-28d9-4fb3-83c6-d38cda4ecba5" (UID: "7859b77b-28d9-4fb3-83c6-d38cda4ecba5"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:48:44 crc kubenswrapper[5070]: I1213 03:48:44.337165 5070 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7859b77b-28d9-4fb3-83c6-d38cda4ecba5-inventory\") on node \"crc\" DevicePath \"\"" Dec 13 03:48:44 crc kubenswrapper[5070]: I1213 03:48:44.337213 5070 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/7859b77b-28d9-4fb3-83c6-d38cda4ecba5-ceph\") on node \"crc\" DevicePath \"\"" Dec 13 03:48:44 crc kubenswrapper[5070]: I1213 03:48:44.337223 5070 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7859b77b-28d9-4fb3-83c6-d38cda4ecba5-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 13 03:48:44 crc kubenswrapper[5070]: I1213 03:48:44.337233 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j4f4k\" (UniqueName: \"kubernetes.io/projected/7859b77b-28d9-4fb3-83c6-d38cda4ecba5-kube-api-access-j4f4k\") on node \"crc\" DevicePath \"\"" Dec 13 03:48:44 crc kubenswrapper[5070]: I1213 03:48:44.337247 5070 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7859b77b-28d9-4fb3-83c6-d38cda4ecba5-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 03:48:44 crc kubenswrapper[5070]: I1213 03:48:44.738831 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-cmh89" event={"ID":"7859b77b-28d9-4fb3-83c6-d38cda4ecba5","Type":"ContainerDied","Data":"1dd2f1197b9218210f772ce1ef135c13a14f9ae7e54faf27df7eb29a33d77d05"} Dec 13 03:48:44 crc kubenswrapper[5070]: I1213 03:48:44.738897 5070 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1dd2f1197b9218210f772ce1ef135c13a14f9ae7e54faf27df7eb29a33d77d05" Dec 13 03:48:44 crc kubenswrapper[5070]: I1213 03:48:44.738939 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-cmh89" Dec 13 03:48:44 crc kubenswrapper[5070]: I1213 03:48:44.858541 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-j7nn8"] Dec 13 03:48:44 crc kubenswrapper[5070]: E1213 03:48:44.859025 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7859b77b-28d9-4fb3-83c6-d38cda4ecba5" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Dec 13 03:48:44 crc kubenswrapper[5070]: I1213 03:48:44.859051 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="7859b77b-28d9-4fb3-83c6-d38cda4ecba5" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Dec 13 03:48:44 crc kubenswrapper[5070]: I1213 03:48:44.859311 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="7859b77b-28d9-4fb3-83c6-d38cda4ecba5" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Dec 13 03:48:44 crc kubenswrapper[5070]: I1213 03:48:44.860385 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-j7nn8" Dec 13 03:48:44 crc kubenswrapper[5070]: I1213 03:48:44.862703 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Dec 13 03:48:44 crc kubenswrapper[5070]: I1213 03:48:44.863615 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 13 03:48:44 crc kubenswrapper[5070]: I1213 03:48:44.863967 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 13 03:48:44 crc kubenswrapper[5070]: I1213 03:48:44.864162 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 13 03:48:44 crc kubenswrapper[5070]: I1213 03:48:44.864304 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-gdkcc" Dec 13 03:48:44 crc kubenswrapper[5070]: I1213 03:48:44.870696 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-j7nn8"] Dec 13 03:48:44 crc kubenswrapper[5070]: I1213 03:48:44.948570 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h6vfk\" (UniqueName: \"kubernetes.io/projected/8c5ddd00-19e7-48fa-8573-a1f65b853c3e-kube-api-access-h6vfk\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-j7nn8\" (UID: \"8c5ddd00-19e7-48fa-8573-a1f65b853c3e\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-j7nn8" Dec 13 03:48:44 crc kubenswrapper[5070]: I1213 03:48:44.948731 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/8c5ddd00-19e7-48fa-8573-a1f65b853c3e-ceph\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-j7nn8\" (UID: \"8c5ddd00-19e7-48fa-8573-a1f65b853c3e\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-j7nn8" Dec 13 03:48:44 crc kubenswrapper[5070]: I1213 03:48:44.949278 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8c5ddd00-19e7-48fa-8573-a1f65b853c3e-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-j7nn8\" (UID: 
\"8c5ddd00-19e7-48fa-8573-a1f65b853c3e\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-j7nn8" Dec 13 03:48:44 crc kubenswrapper[5070]: I1213 03:48:44.949331 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8c5ddd00-19e7-48fa-8573-a1f65b853c3e-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-j7nn8\" (UID: \"8c5ddd00-19e7-48fa-8573-a1f65b853c3e\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-j7nn8" Dec 13 03:48:45 crc kubenswrapper[5070]: I1213 03:48:45.051614 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/8c5ddd00-19e7-48fa-8573-a1f65b853c3e-ceph\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-j7nn8\" (UID: \"8c5ddd00-19e7-48fa-8573-a1f65b853c3e\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-j7nn8" Dec 13 03:48:45 crc kubenswrapper[5070]: I1213 03:48:45.051820 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8c5ddd00-19e7-48fa-8573-a1f65b853c3e-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-j7nn8\" (UID: \"8c5ddd00-19e7-48fa-8573-a1f65b853c3e\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-j7nn8" Dec 13 03:48:45 crc kubenswrapper[5070]: I1213 03:48:45.051872 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8c5ddd00-19e7-48fa-8573-a1f65b853c3e-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-j7nn8\" (UID: \"8c5ddd00-19e7-48fa-8573-a1f65b853c3e\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-j7nn8" Dec 13 03:48:45 crc kubenswrapper[5070]: I1213 03:48:45.051944 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h6vfk\" (UniqueName: \"kubernetes.io/projected/8c5ddd00-19e7-48fa-8573-a1f65b853c3e-kube-api-access-h6vfk\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-j7nn8\" (UID: \"8c5ddd00-19e7-48fa-8573-a1f65b853c3e\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-j7nn8" Dec 13 03:48:45 crc kubenswrapper[5070]: I1213 03:48:45.056679 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/8c5ddd00-19e7-48fa-8573-a1f65b853c3e-ceph\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-j7nn8\" (UID: \"8c5ddd00-19e7-48fa-8573-a1f65b853c3e\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-j7nn8" Dec 13 03:48:45 crc kubenswrapper[5070]: I1213 03:48:45.057029 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8c5ddd00-19e7-48fa-8573-a1f65b853c3e-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-j7nn8\" (UID: \"8c5ddd00-19e7-48fa-8573-a1f65b853c3e\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-j7nn8" Dec 13 03:48:45 crc kubenswrapper[5070]: I1213 03:48:45.057163 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8c5ddd00-19e7-48fa-8573-a1f65b853c3e-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-j7nn8\" (UID: \"8c5ddd00-19e7-48fa-8573-a1f65b853c3e\") " 
pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-j7nn8" Dec 13 03:48:45 crc kubenswrapper[5070]: I1213 03:48:45.068899 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h6vfk\" (UniqueName: \"kubernetes.io/projected/8c5ddd00-19e7-48fa-8573-a1f65b853c3e-kube-api-access-h6vfk\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-j7nn8\" (UID: \"8c5ddd00-19e7-48fa-8573-a1f65b853c3e\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-j7nn8" Dec 13 03:48:45 crc kubenswrapper[5070]: I1213 03:48:45.176820 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-j7nn8" Dec 13 03:48:45 crc kubenswrapper[5070]: I1213 03:48:45.738778 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-j7nn8"] Dec 13 03:48:45 crc kubenswrapper[5070]: I1213 03:48:45.749057 5070 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 13 03:48:46 crc kubenswrapper[5070]: I1213 03:48:46.757347 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-j7nn8" event={"ID":"8c5ddd00-19e7-48fa-8573-a1f65b853c3e","Type":"ContainerStarted","Data":"fa9d8c10e077dd62c8cbdc7ba60cfad682c0875bf22983d805e552642ca80686"} Dec 13 03:48:46 crc kubenswrapper[5070]: I1213 03:48:46.757754 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-j7nn8" event={"ID":"8c5ddd00-19e7-48fa-8573-a1f65b853c3e","Type":"ContainerStarted","Data":"5431b5d92ca8257f10cfdee34d274d10d2da546033707a435eab91d9ef4f6aae"} Dec 13 03:48:51 crc kubenswrapper[5070]: I1213 03:48:51.942535 5070 patch_prober.go:28] interesting pod/machine-config-daemon-9l4rb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 13 03:48:51 crc kubenswrapper[5070]: I1213 03:48:51.942842 5070 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 13 03:48:51 crc kubenswrapper[5070]: I1213 03:48:51.942892 5070 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" Dec 13 03:48:51 crc kubenswrapper[5070]: I1213 03:48:51.943712 5070 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"780d0740a03317cdcccec08fe9d993d16c77a58a86849080dfe009ba45672bf2"} pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 13 03:48:51 crc kubenswrapper[5070]: I1213 03:48:51.943784 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" containerName="machine-config-daemon" 
containerID="cri-o://780d0740a03317cdcccec08fe9d993d16c77a58a86849080dfe009ba45672bf2" gracePeriod=600 Dec 13 03:48:52 crc kubenswrapper[5070]: I1213 03:48:52.809602 5070 generic.go:334] "Generic (PLEG): container finished" podID="a2e447c7-5901-414f-af96-69441d4750db" containerID="780d0740a03317cdcccec08fe9d993d16c77a58a86849080dfe009ba45672bf2" exitCode=0 Dec 13 03:48:52 crc kubenswrapper[5070]: I1213 03:48:52.809656 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" event={"ID":"a2e447c7-5901-414f-af96-69441d4750db","Type":"ContainerDied","Data":"780d0740a03317cdcccec08fe9d993d16c77a58a86849080dfe009ba45672bf2"} Dec 13 03:48:52 crc kubenswrapper[5070]: I1213 03:48:52.809694 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" event={"ID":"a2e447c7-5901-414f-af96-69441d4750db","Type":"ContainerStarted","Data":"56c86f8149d0aae997cc460a6956ca1f0ca99356e791ef266d0035921c2b2a17"} Dec 13 03:48:52 crc kubenswrapper[5070]: I1213 03:48:52.809716 5070 scope.go:117] "RemoveContainer" containerID="868a59edbd4e377cf977ca832f6b9371f5846d986a6bbcacfbabf30c647e78a8" Dec 13 03:48:52 crc kubenswrapper[5070]: I1213 03:48:52.836161 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-j7nn8" podStartSLOduration=8.365533922000001 podStartE2EDuration="8.836143446s" podCreationTimestamp="2025-12-13 03:48:44 +0000 UTC" firstStartedPulling="2025-12-13 03:48:45.748872074 +0000 UTC m=+2217.984715620" lastFinishedPulling="2025-12-13 03:48:46.219481598 +0000 UTC m=+2218.455325144" observedRunningTime="2025-12-13 03:48:46.780775752 +0000 UTC m=+2219.016619308" watchObservedRunningTime="2025-12-13 03:48:52.836143446 +0000 UTC m=+2225.071986992" Dec 13 03:49:10 crc kubenswrapper[5070]: I1213 03:49:10.972169 5070 generic.go:334] "Generic (PLEG): container finished" podID="8c5ddd00-19e7-48fa-8573-a1f65b853c3e" containerID="fa9d8c10e077dd62c8cbdc7ba60cfad682c0875bf22983d805e552642ca80686" exitCode=0 Dec 13 03:49:10 crc kubenswrapper[5070]: I1213 03:49:10.972272 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-j7nn8" event={"ID":"8c5ddd00-19e7-48fa-8573-a1f65b853c3e","Type":"ContainerDied","Data":"fa9d8c10e077dd62c8cbdc7ba60cfad682c0875bf22983d805e552642ca80686"} Dec 13 03:49:12 crc kubenswrapper[5070]: I1213 03:49:12.344944 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-j7nn8" Dec 13 03:49:12 crc kubenswrapper[5070]: I1213 03:49:12.460849 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h6vfk\" (UniqueName: \"kubernetes.io/projected/8c5ddd00-19e7-48fa-8573-a1f65b853c3e-kube-api-access-h6vfk\") pod \"8c5ddd00-19e7-48fa-8573-a1f65b853c3e\" (UID: \"8c5ddd00-19e7-48fa-8573-a1f65b853c3e\") " Dec 13 03:49:12 crc kubenswrapper[5070]: I1213 03:49:12.460918 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8c5ddd00-19e7-48fa-8573-a1f65b853c3e-ssh-key\") pod \"8c5ddd00-19e7-48fa-8573-a1f65b853c3e\" (UID: \"8c5ddd00-19e7-48fa-8573-a1f65b853c3e\") " Dec 13 03:49:12 crc kubenswrapper[5070]: I1213 03:49:12.461007 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8c5ddd00-19e7-48fa-8573-a1f65b853c3e-inventory\") pod \"8c5ddd00-19e7-48fa-8573-a1f65b853c3e\" (UID: \"8c5ddd00-19e7-48fa-8573-a1f65b853c3e\") " Dec 13 03:49:12 crc kubenswrapper[5070]: I1213 03:49:12.461084 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/8c5ddd00-19e7-48fa-8573-a1f65b853c3e-ceph\") pod \"8c5ddd00-19e7-48fa-8573-a1f65b853c3e\" (UID: \"8c5ddd00-19e7-48fa-8573-a1f65b853c3e\") " Dec 13 03:49:12 crc kubenswrapper[5070]: I1213 03:49:12.478693 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8c5ddd00-19e7-48fa-8573-a1f65b853c3e-ceph" (OuterVolumeSpecName: "ceph") pod "8c5ddd00-19e7-48fa-8573-a1f65b853c3e" (UID: "8c5ddd00-19e7-48fa-8573-a1f65b853c3e"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:49:12 crc kubenswrapper[5070]: I1213 03:49:12.479393 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8c5ddd00-19e7-48fa-8573-a1f65b853c3e-kube-api-access-h6vfk" (OuterVolumeSpecName: "kube-api-access-h6vfk") pod "8c5ddd00-19e7-48fa-8573-a1f65b853c3e" (UID: "8c5ddd00-19e7-48fa-8573-a1f65b853c3e"). InnerVolumeSpecName "kube-api-access-h6vfk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:49:12 crc kubenswrapper[5070]: I1213 03:49:12.505265 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8c5ddd00-19e7-48fa-8573-a1f65b853c3e-inventory" (OuterVolumeSpecName: "inventory") pod "8c5ddd00-19e7-48fa-8573-a1f65b853c3e" (UID: "8c5ddd00-19e7-48fa-8573-a1f65b853c3e"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:49:12 crc kubenswrapper[5070]: I1213 03:49:12.508963 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8c5ddd00-19e7-48fa-8573-a1f65b853c3e-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "8c5ddd00-19e7-48fa-8573-a1f65b853c3e" (UID: "8c5ddd00-19e7-48fa-8573-a1f65b853c3e"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:49:12 crc kubenswrapper[5070]: I1213 03:49:12.564556 5070 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8c5ddd00-19e7-48fa-8573-a1f65b853c3e-inventory\") on node \"crc\" DevicePath \"\"" Dec 13 03:49:12 crc kubenswrapper[5070]: I1213 03:49:12.564788 5070 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/8c5ddd00-19e7-48fa-8573-a1f65b853c3e-ceph\") on node \"crc\" DevicePath \"\"" Dec 13 03:49:12 crc kubenswrapper[5070]: I1213 03:49:12.564859 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h6vfk\" (UniqueName: \"kubernetes.io/projected/8c5ddd00-19e7-48fa-8573-a1f65b853c3e-kube-api-access-h6vfk\") on node \"crc\" DevicePath \"\"" Dec 13 03:49:12 crc kubenswrapper[5070]: I1213 03:49:12.564918 5070 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8c5ddd00-19e7-48fa-8573-a1f65b853c3e-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 13 03:49:12 crc kubenswrapper[5070]: I1213 03:49:12.992160 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-j7nn8" event={"ID":"8c5ddd00-19e7-48fa-8573-a1f65b853c3e","Type":"ContainerDied","Data":"5431b5d92ca8257f10cfdee34d274d10d2da546033707a435eab91d9ef4f6aae"} Dec 13 03:49:12 crc kubenswrapper[5070]: I1213 03:49:12.992222 5070 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5431b5d92ca8257f10cfdee34d274d10d2da546033707a435eab91d9ef4f6aae" Dec 13 03:49:12 crc kubenswrapper[5070]: I1213 03:49:12.992710 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-j7nn8" Dec 13 03:49:13 crc kubenswrapper[5070]: I1213 03:49:13.111004 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-gz4s8"] Dec 13 03:49:13 crc kubenswrapper[5070]: E1213 03:49:13.111395 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c5ddd00-19e7-48fa-8573-a1f65b853c3e" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Dec 13 03:49:13 crc kubenswrapper[5070]: I1213 03:49:13.111413 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c5ddd00-19e7-48fa-8573-a1f65b853c3e" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Dec 13 03:49:13 crc kubenswrapper[5070]: I1213 03:49:13.111677 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="8c5ddd00-19e7-48fa-8573-a1f65b853c3e" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Dec 13 03:49:13 crc kubenswrapper[5070]: I1213 03:49:13.112298 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-gz4s8" Dec 13 03:49:13 crc kubenswrapper[5070]: I1213 03:49:13.118155 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-gdkcc" Dec 13 03:49:13 crc kubenswrapper[5070]: I1213 03:49:13.118303 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Dec 13 03:49:13 crc kubenswrapper[5070]: I1213 03:49:13.118417 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 13 03:49:13 crc kubenswrapper[5070]: I1213 03:49:13.119288 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 13 03:49:13 crc kubenswrapper[5070]: I1213 03:49:13.119574 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 13 03:49:13 crc kubenswrapper[5070]: I1213 03:49:13.136267 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-gz4s8"] Dec 13 03:49:13 crc kubenswrapper[5070]: I1213 03:49:13.278388 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a603c4f1-a640-47d1-819e-0518857464c4-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-gz4s8\" (UID: \"a603c4f1-a640-47d1-819e-0518857464c4\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-gz4s8" Dec 13 03:49:13 crc kubenswrapper[5070]: I1213 03:49:13.278512 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/a603c4f1-a640-47d1-819e-0518857464c4-ceph\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-gz4s8\" (UID: \"a603c4f1-a640-47d1-819e-0518857464c4\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-gz4s8" Dec 13 03:49:13 crc kubenswrapper[5070]: I1213 03:49:13.278778 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6tfjc\" (UniqueName: \"kubernetes.io/projected/a603c4f1-a640-47d1-819e-0518857464c4-kube-api-access-6tfjc\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-gz4s8\" (UID: \"a603c4f1-a640-47d1-819e-0518857464c4\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-gz4s8" Dec 13 03:49:13 crc kubenswrapper[5070]: I1213 03:49:13.279257 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a603c4f1-a640-47d1-819e-0518857464c4-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-gz4s8\" (UID: \"a603c4f1-a640-47d1-819e-0518857464c4\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-gz4s8" Dec 13 03:49:13 crc kubenswrapper[5070]: I1213 03:49:13.381123 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a603c4f1-a640-47d1-819e-0518857464c4-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-gz4s8\" (UID: \"a603c4f1-a640-47d1-819e-0518857464c4\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-gz4s8" Dec 13 03:49:13 crc kubenswrapper[5070]: I1213 03:49:13.381198 5070 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/a603c4f1-a640-47d1-819e-0518857464c4-ceph\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-gz4s8\" (UID: \"a603c4f1-a640-47d1-819e-0518857464c4\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-gz4s8" Dec 13 03:49:13 crc kubenswrapper[5070]: I1213 03:49:13.381258 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6tfjc\" (UniqueName: \"kubernetes.io/projected/a603c4f1-a640-47d1-819e-0518857464c4-kube-api-access-6tfjc\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-gz4s8\" (UID: \"a603c4f1-a640-47d1-819e-0518857464c4\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-gz4s8" Dec 13 03:49:13 crc kubenswrapper[5070]: I1213 03:49:13.381321 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a603c4f1-a640-47d1-819e-0518857464c4-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-gz4s8\" (UID: \"a603c4f1-a640-47d1-819e-0518857464c4\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-gz4s8" Dec 13 03:49:13 crc kubenswrapper[5070]: I1213 03:49:13.385346 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a603c4f1-a640-47d1-819e-0518857464c4-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-gz4s8\" (UID: \"a603c4f1-a640-47d1-819e-0518857464c4\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-gz4s8" Dec 13 03:49:13 crc kubenswrapper[5070]: I1213 03:49:13.385860 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/a603c4f1-a640-47d1-819e-0518857464c4-ceph\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-gz4s8\" (UID: \"a603c4f1-a640-47d1-819e-0518857464c4\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-gz4s8" Dec 13 03:49:13 crc kubenswrapper[5070]: I1213 03:49:13.387988 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a603c4f1-a640-47d1-819e-0518857464c4-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-gz4s8\" (UID: \"a603c4f1-a640-47d1-819e-0518857464c4\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-gz4s8" Dec 13 03:49:13 crc kubenswrapper[5070]: I1213 03:49:13.413505 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6tfjc\" (UniqueName: \"kubernetes.io/projected/a603c4f1-a640-47d1-819e-0518857464c4-kube-api-access-6tfjc\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-gz4s8\" (UID: \"a603c4f1-a640-47d1-819e-0518857464c4\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-gz4s8" Dec 13 03:49:13 crc kubenswrapper[5070]: I1213 03:49:13.429807 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-gz4s8" Dec 13 03:49:13 crc kubenswrapper[5070]: I1213 03:49:13.761391 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-gz4s8"] Dec 13 03:49:14 crc kubenswrapper[5070]: I1213 03:49:14.002103 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-gz4s8" event={"ID":"a603c4f1-a640-47d1-819e-0518857464c4","Type":"ContainerStarted","Data":"f7b1560ea1beb48fa1a918fbffca99e6203bf4639142bca5d0b22e4afbbc30b4"} Dec 13 03:49:15 crc kubenswrapper[5070]: I1213 03:49:15.011585 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-gz4s8" event={"ID":"a603c4f1-a640-47d1-819e-0518857464c4","Type":"ContainerStarted","Data":"b0a0a3a4e877aff944c8d275693f6d5b9c4bbbb866d49dc4e3b14fc26ffc28e1"} Dec 13 03:49:15 crc kubenswrapper[5070]: I1213 03:49:15.033274 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-gz4s8" podStartSLOduration=1.554734282 podStartE2EDuration="2.033256393s" podCreationTimestamp="2025-12-13 03:49:13 +0000 UTC" firstStartedPulling="2025-12-13 03:49:13.767579845 +0000 UTC m=+2246.003423391" lastFinishedPulling="2025-12-13 03:49:14.246101956 +0000 UTC m=+2246.481945502" observedRunningTime="2025-12-13 03:49:15.026981911 +0000 UTC m=+2247.262825467" watchObservedRunningTime="2025-12-13 03:49:15.033256393 +0000 UTC m=+2247.269099939" Dec 13 03:49:19 crc kubenswrapper[5070]: I1213 03:49:19.070056 5070 generic.go:334] "Generic (PLEG): container finished" podID="a603c4f1-a640-47d1-819e-0518857464c4" containerID="b0a0a3a4e877aff944c8d275693f6d5b9c4bbbb866d49dc4e3b14fc26ffc28e1" exitCode=0 Dec 13 03:49:19 crc kubenswrapper[5070]: I1213 03:49:19.070153 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-gz4s8" event={"ID":"a603c4f1-a640-47d1-819e-0518857464c4","Type":"ContainerDied","Data":"b0a0a3a4e877aff944c8d275693f6d5b9c4bbbb866d49dc4e3b14fc26ffc28e1"} Dec 13 03:49:20 crc kubenswrapper[5070]: I1213 03:49:20.444658 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-gz4s8" Dec 13 03:49:20 crc kubenswrapper[5070]: I1213 03:49:20.510595 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/a603c4f1-a640-47d1-819e-0518857464c4-ceph\") pod \"a603c4f1-a640-47d1-819e-0518857464c4\" (UID: \"a603c4f1-a640-47d1-819e-0518857464c4\") " Dec 13 03:49:20 crc kubenswrapper[5070]: I1213 03:49:20.510654 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6tfjc\" (UniqueName: \"kubernetes.io/projected/a603c4f1-a640-47d1-819e-0518857464c4-kube-api-access-6tfjc\") pod \"a603c4f1-a640-47d1-819e-0518857464c4\" (UID: \"a603c4f1-a640-47d1-819e-0518857464c4\") " Dec 13 03:49:20 crc kubenswrapper[5070]: I1213 03:49:20.510805 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a603c4f1-a640-47d1-819e-0518857464c4-ssh-key\") pod \"a603c4f1-a640-47d1-819e-0518857464c4\" (UID: \"a603c4f1-a640-47d1-819e-0518857464c4\") " Dec 13 03:49:20 crc kubenswrapper[5070]: I1213 03:49:20.510844 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a603c4f1-a640-47d1-819e-0518857464c4-inventory\") pod \"a603c4f1-a640-47d1-819e-0518857464c4\" (UID: \"a603c4f1-a640-47d1-819e-0518857464c4\") " Dec 13 03:49:20 crc kubenswrapper[5070]: I1213 03:49:20.517181 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a603c4f1-a640-47d1-819e-0518857464c4-ceph" (OuterVolumeSpecName: "ceph") pod "a603c4f1-a640-47d1-819e-0518857464c4" (UID: "a603c4f1-a640-47d1-819e-0518857464c4"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:49:20 crc kubenswrapper[5070]: I1213 03:49:20.522691 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a603c4f1-a640-47d1-819e-0518857464c4-kube-api-access-6tfjc" (OuterVolumeSpecName: "kube-api-access-6tfjc") pod "a603c4f1-a640-47d1-819e-0518857464c4" (UID: "a603c4f1-a640-47d1-819e-0518857464c4"). InnerVolumeSpecName "kube-api-access-6tfjc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:49:20 crc kubenswrapper[5070]: I1213 03:49:20.542379 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a603c4f1-a640-47d1-819e-0518857464c4-inventory" (OuterVolumeSpecName: "inventory") pod "a603c4f1-a640-47d1-819e-0518857464c4" (UID: "a603c4f1-a640-47d1-819e-0518857464c4"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:49:20 crc kubenswrapper[5070]: I1213 03:49:20.546296 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a603c4f1-a640-47d1-819e-0518857464c4-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "a603c4f1-a640-47d1-819e-0518857464c4" (UID: "a603c4f1-a640-47d1-819e-0518857464c4"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:49:20 crc kubenswrapper[5070]: I1213 03:49:20.613773 5070 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a603c4f1-a640-47d1-819e-0518857464c4-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 13 03:49:20 crc kubenswrapper[5070]: I1213 03:49:20.613816 5070 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a603c4f1-a640-47d1-819e-0518857464c4-inventory\") on node \"crc\" DevicePath \"\"" Dec 13 03:49:20 crc kubenswrapper[5070]: I1213 03:49:20.613832 5070 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/a603c4f1-a640-47d1-819e-0518857464c4-ceph\") on node \"crc\" DevicePath \"\"" Dec 13 03:49:20 crc kubenswrapper[5070]: I1213 03:49:20.613848 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6tfjc\" (UniqueName: \"kubernetes.io/projected/a603c4f1-a640-47d1-819e-0518857464c4-kube-api-access-6tfjc\") on node \"crc\" DevicePath \"\"" Dec 13 03:49:21 crc kubenswrapper[5070]: I1213 03:49:21.089643 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-gz4s8" event={"ID":"a603c4f1-a640-47d1-819e-0518857464c4","Type":"ContainerDied","Data":"f7b1560ea1beb48fa1a918fbffca99e6203bf4639142bca5d0b22e4afbbc30b4"} Dec 13 03:49:21 crc kubenswrapper[5070]: I1213 03:49:21.089702 5070 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f7b1560ea1beb48fa1a918fbffca99e6203bf4639142bca5d0b22e4afbbc30b4" Dec 13 03:49:21 crc kubenswrapper[5070]: I1213 03:49:21.089804 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-gz4s8" Dec 13 03:49:21 crc kubenswrapper[5070]: I1213 03:49:21.170939 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-qhqcm"] Dec 13 03:49:21 crc kubenswrapper[5070]: E1213 03:49:21.171362 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a603c4f1-a640-47d1-819e-0518857464c4" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Dec 13 03:49:21 crc kubenswrapper[5070]: I1213 03:49:21.171386 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="a603c4f1-a640-47d1-819e-0518857464c4" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Dec 13 03:49:21 crc kubenswrapper[5070]: I1213 03:49:21.171644 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="a603c4f1-a640-47d1-819e-0518857464c4" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Dec 13 03:49:21 crc kubenswrapper[5070]: I1213 03:49:21.172409 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-qhqcm" Dec 13 03:49:21 crc kubenswrapper[5070]: I1213 03:49:21.174951 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 13 03:49:21 crc kubenswrapper[5070]: I1213 03:49:21.174958 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 13 03:49:21 crc kubenswrapper[5070]: I1213 03:49:21.177160 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Dec 13 03:49:21 crc kubenswrapper[5070]: I1213 03:49:21.177556 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 13 03:49:21 crc kubenswrapper[5070]: I1213 03:49:21.177785 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-gdkcc" Dec 13 03:49:21 crc kubenswrapper[5070]: I1213 03:49:21.196698 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-qhqcm"] Dec 13 03:49:21 crc kubenswrapper[5070]: I1213 03:49:21.325235 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kfmjt\" (UniqueName: \"kubernetes.io/projected/d9ad7e67-58c9-4cee-8154-dc5119e96687-kube-api-access-kfmjt\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-qhqcm\" (UID: \"d9ad7e67-58c9-4cee-8154-dc5119e96687\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-qhqcm" Dec 13 03:49:21 crc kubenswrapper[5070]: I1213 03:49:21.325365 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/d9ad7e67-58c9-4cee-8154-dc5119e96687-ceph\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-qhqcm\" (UID: \"d9ad7e67-58c9-4cee-8154-dc5119e96687\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-qhqcm" Dec 13 03:49:21 crc kubenswrapper[5070]: I1213 03:49:21.325395 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d9ad7e67-58c9-4cee-8154-dc5119e96687-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-qhqcm\" (UID: \"d9ad7e67-58c9-4cee-8154-dc5119e96687\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-qhqcm" Dec 13 03:49:21 crc kubenswrapper[5070]: I1213 03:49:21.325430 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d9ad7e67-58c9-4cee-8154-dc5119e96687-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-qhqcm\" (UID: \"d9ad7e67-58c9-4cee-8154-dc5119e96687\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-qhqcm" Dec 13 03:49:21 crc kubenswrapper[5070]: I1213 03:49:21.427529 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kfmjt\" (UniqueName: \"kubernetes.io/projected/d9ad7e67-58c9-4cee-8154-dc5119e96687-kube-api-access-kfmjt\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-qhqcm\" (UID: \"d9ad7e67-58c9-4cee-8154-dc5119e96687\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-qhqcm" Dec 13 03:49:21 crc kubenswrapper[5070]: I1213 03:49:21.427660 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" 
(UniqueName: \"kubernetes.io/secret/d9ad7e67-58c9-4cee-8154-dc5119e96687-ceph\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-qhqcm\" (UID: \"d9ad7e67-58c9-4cee-8154-dc5119e96687\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-qhqcm" Dec 13 03:49:21 crc kubenswrapper[5070]: I1213 03:49:21.427695 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d9ad7e67-58c9-4cee-8154-dc5119e96687-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-qhqcm\" (UID: \"d9ad7e67-58c9-4cee-8154-dc5119e96687\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-qhqcm" Dec 13 03:49:21 crc kubenswrapper[5070]: I1213 03:49:21.427727 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d9ad7e67-58c9-4cee-8154-dc5119e96687-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-qhqcm\" (UID: \"d9ad7e67-58c9-4cee-8154-dc5119e96687\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-qhqcm" Dec 13 03:49:21 crc kubenswrapper[5070]: I1213 03:49:21.435397 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/d9ad7e67-58c9-4cee-8154-dc5119e96687-ceph\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-qhqcm\" (UID: \"d9ad7e67-58c9-4cee-8154-dc5119e96687\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-qhqcm" Dec 13 03:49:21 crc kubenswrapper[5070]: I1213 03:49:21.435878 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d9ad7e67-58c9-4cee-8154-dc5119e96687-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-qhqcm\" (UID: \"d9ad7e67-58c9-4cee-8154-dc5119e96687\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-qhqcm" Dec 13 03:49:21 crc kubenswrapper[5070]: I1213 03:49:21.443578 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d9ad7e67-58c9-4cee-8154-dc5119e96687-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-qhqcm\" (UID: \"d9ad7e67-58c9-4cee-8154-dc5119e96687\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-qhqcm" Dec 13 03:49:21 crc kubenswrapper[5070]: I1213 03:49:21.451171 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kfmjt\" (UniqueName: \"kubernetes.io/projected/d9ad7e67-58c9-4cee-8154-dc5119e96687-kube-api-access-kfmjt\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-qhqcm\" (UID: \"d9ad7e67-58c9-4cee-8154-dc5119e96687\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-qhqcm" Dec 13 03:49:21 crc kubenswrapper[5070]: I1213 03:49:21.491430 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-qhqcm" Dec 13 03:49:22 crc kubenswrapper[5070]: I1213 03:49:22.012106 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-qhqcm"] Dec 13 03:49:22 crc kubenswrapper[5070]: I1213 03:49:22.099423 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-qhqcm" event={"ID":"d9ad7e67-58c9-4cee-8154-dc5119e96687","Type":"ContainerStarted","Data":"14eba869131030766fb8b9d5cd995d1b1cd2df93f52c71c1d02141f7224806da"} Dec 13 03:49:23 crc kubenswrapper[5070]: I1213 03:49:23.108841 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-qhqcm" event={"ID":"d9ad7e67-58c9-4cee-8154-dc5119e96687","Type":"ContainerStarted","Data":"17edb7d15320c1f6c036c2843f3339cf9db72e6dba342266133de3cf6aac45d6"} Dec 13 03:49:23 crc kubenswrapper[5070]: I1213 03:49:23.137203 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-qhqcm" podStartSLOduration=1.578153086 podStartE2EDuration="2.137169256s" podCreationTimestamp="2025-12-13 03:49:21 +0000 UTC" firstStartedPulling="2025-12-13 03:49:22.017683944 +0000 UTC m=+2254.253527490" lastFinishedPulling="2025-12-13 03:49:22.576700114 +0000 UTC m=+2254.812543660" observedRunningTime="2025-12-13 03:49:23.13295442 +0000 UTC m=+2255.368797966" watchObservedRunningTime="2025-12-13 03:49:23.137169256 +0000 UTC m=+2255.373012802" Dec 13 03:49:57 crc kubenswrapper[5070]: I1213 03:49:57.405222 5070 generic.go:334] "Generic (PLEG): container finished" podID="d9ad7e67-58c9-4cee-8154-dc5119e96687" containerID="17edb7d15320c1f6c036c2843f3339cf9db72e6dba342266133de3cf6aac45d6" exitCode=0 Dec 13 03:49:57 crc kubenswrapper[5070]: I1213 03:49:57.405293 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-qhqcm" event={"ID":"d9ad7e67-58c9-4cee-8154-dc5119e96687","Type":"ContainerDied","Data":"17edb7d15320c1f6c036c2843f3339cf9db72e6dba342266133de3cf6aac45d6"} Dec 13 03:49:58 crc kubenswrapper[5070]: I1213 03:49:58.838861 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-qhqcm" Dec 13 03:49:58 crc kubenswrapper[5070]: I1213 03:49:58.932355 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/d9ad7e67-58c9-4cee-8154-dc5119e96687-ceph\") pod \"d9ad7e67-58c9-4cee-8154-dc5119e96687\" (UID: \"d9ad7e67-58c9-4cee-8154-dc5119e96687\") " Dec 13 03:49:58 crc kubenswrapper[5070]: I1213 03:49:58.932407 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d9ad7e67-58c9-4cee-8154-dc5119e96687-inventory\") pod \"d9ad7e67-58c9-4cee-8154-dc5119e96687\" (UID: \"d9ad7e67-58c9-4cee-8154-dc5119e96687\") " Dec 13 03:49:58 crc kubenswrapper[5070]: I1213 03:49:58.932494 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfmjt\" (UniqueName: \"kubernetes.io/projected/d9ad7e67-58c9-4cee-8154-dc5119e96687-kube-api-access-kfmjt\") pod \"d9ad7e67-58c9-4cee-8154-dc5119e96687\" (UID: \"d9ad7e67-58c9-4cee-8154-dc5119e96687\") " Dec 13 03:49:58 crc kubenswrapper[5070]: I1213 03:49:58.932647 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d9ad7e67-58c9-4cee-8154-dc5119e96687-ssh-key\") pod \"d9ad7e67-58c9-4cee-8154-dc5119e96687\" (UID: \"d9ad7e67-58c9-4cee-8154-dc5119e96687\") " Dec 13 03:49:58 crc kubenswrapper[5070]: I1213 03:49:58.938196 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d9ad7e67-58c9-4cee-8154-dc5119e96687-ceph" (OuterVolumeSpecName: "ceph") pod "d9ad7e67-58c9-4cee-8154-dc5119e96687" (UID: "d9ad7e67-58c9-4cee-8154-dc5119e96687"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:49:58 crc kubenswrapper[5070]: I1213 03:49:58.939078 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d9ad7e67-58c9-4cee-8154-dc5119e96687-kube-api-access-kfmjt" (OuterVolumeSpecName: "kube-api-access-kfmjt") pod "d9ad7e67-58c9-4cee-8154-dc5119e96687" (UID: "d9ad7e67-58c9-4cee-8154-dc5119e96687"). InnerVolumeSpecName "kube-api-access-kfmjt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:49:58 crc kubenswrapper[5070]: E1213 03:49:58.955571 5070 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d9ad7e67-58c9-4cee-8154-dc5119e96687-ssh-key podName:d9ad7e67-58c9-4cee-8154-dc5119e96687 nodeName:}" failed. No retries permitted until 2025-12-13 03:49:59.455545436 +0000 UTC m=+2291.691388982 (durationBeforeRetry 500ms). Error: error cleaning subPath mounts for volume "ssh-key" (UniqueName: "kubernetes.io/secret/d9ad7e67-58c9-4cee-8154-dc5119e96687-ssh-key") pod "d9ad7e67-58c9-4cee-8154-dc5119e96687" (UID: "d9ad7e67-58c9-4cee-8154-dc5119e96687") : error deleting /var/lib/kubelet/pods/d9ad7e67-58c9-4cee-8154-dc5119e96687/volume-subpaths: remove /var/lib/kubelet/pods/d9ad7e67-58c9-4cee-8154-dc5119e96687/volume-subpaths: no such file or directory Dec 13 03:49:58 crc kubenswrapper[5070]: I1213 03:49:58.960248 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d9ad7e67-58c9-4cee-8154-dc5119e96687-inventory" (OuterVolumeSpecName: "inventory") pod "d9ad7e67-58c9-4cee-8154-dc5119e96687" (UID: "d9ad7e67-58c9-4cee-8154-dc5119e96687"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:49:59 crc kubenswrapper[5070]: I1213 03:49:59.035004 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfmjt\" (UniqueName: \"kubernetes.io/projected/d9ad7e67-58c9-4cee-8154-dc5119e96687-kube-api-access-kfmjt\") on node \"crc\" DevicePath \"\"" Dec 13 03:49:59 crc kubenswrapper[5070]: I1213 03:49:59.035042 5070 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/d9ad7e67-58c9-4cee-8154-dc5119e96687-ceph\") on node \"crc\" DevicePath \"\"" Dec 13 03:49:59 crc kubenswrapper[5070]: I1213 03:49:59.035055 5070 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d9ad7e67-58c9-4cee-8154-dc5119e96687-inventory\") on node \"crc\" DevicePath \"\"" Dec 13 03:49:59 crc kubenswrapper[5070]: I1213 03:49:59.428109 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-qhqcm" event={"ID":"d9ad7e67-58c9-4cee-8154-dc5119e96687","Type":"ContainerDied","Data":"14eba869131030766fb8b9d5cd995d1b1cd2df93f52c71c1d02141f7224806da"} Dec 13 03:49:59 crc kubenswrapper[5070]: I1213 03:49:59.428438 5070 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="14eba869131030766fb8b9d5cd995d1b1cd2df93f52c71c1d02141f7224806da" Dec 13 03:49:59 crc kubenswrapper[5070]: I1213 03:49:59.428161 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-qhqcm" Dec 13 03:49:59 crc kubenswrapper[5070]: I1213 03:49:59.503387 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-vc52p"] Dec 13 03:49:59 crc kubenswrapper[5070]: E1213 03:49:59.503878 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d9ad7e67-58c9-4cee-8154-dc5119e96687" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Dec 13 03:49:59 crc kubenswrapper[5070]: I1213 03:49:59.503897 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="d9ad7e67-58c9-4cee-8154-dc5119e96687" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Dec 13 03:49:59 crc kubenswrapper[5070]: I1213 03:49:59.504177 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="d9ad7e67-58c9-4cee-8154-dc5119e96687" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Dec 13 03:49:59 crc kubenswrapper[5070]: I1213 03:49:59.506931 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-vc52p" Dec 13 03:49:59 crc kubenswrapper[5070]: I1213 03:49:59.517416 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-vc52p"] Dec 13 03:49:59 crc kubenswrapper[5070]: I1213 03:49:59.544534 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d9ad7e67-58c9-4cee-8154-dc5119e96687-ssh-key\") pod \"d9ad7e67-58c9-4cee-8154-dc5119e96687\" (UID: \"d9ad7e67-58c9-4cee-8154-dc5119e96687\") " Dec 13 03:49:59 crc kubenswrapper[5070]: I1213 03:49:59.548904 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d9ad7e67-58c9-4cee-8154-dc5119e96687-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "d9ad7e67-58c9-4cee-8154-dc5119e96687" (UID: "d9ad7e67-58c9-4cee-8154-dc5119e96687"). 
InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:49:59 crc kubenswrapper[5070]: I1213 03:49:59.646392 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/3dc4fcae-76ad-430d-968e-9bb5e53d589e-ceph\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-vc52p\" (UID: \"3dc4fcae-76ad-430d-968e-9bb5e53d589e\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-vc52p" Dec 13 03:49:59 crc kubenswrapper[5070]: I1213 03:49:59.646501 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tm7jz\" (UniqueName: \"kubernetes.io/projected/3dc4fcae-76ad-430d-968e-9bb5e53d589e-kube-api-access-tm7jz\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-vc52p\" (UID: \"3dc4fcae-76ad-430d-968e-9bb5e53d589e\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-vc52p" Dec 13 03:49:59 crc kubenswrapper[5070]: I1213 03:49:59.646570 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3dc4fcae-76ad-430d-968e-9bb5e53d589e-inventory\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-vc52p\" (UID: \"3dc4fcae-76ad-430d-968e-9bb5e53d589e\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-vc52p" Dec 13 03:49:59 crc kubenswrapper[5070]: I1213 03:49:59.646741 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3dc4fcae-76ad-430d-968e-9bb5e53d589e-ssh-key\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-vc52p\" (UID: \"3dc4fcae-76ad-430d-968e-9bb5e53d589e\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-vc52p" Dec 13 03:49:59 crc kubenswrapper[5070]: I1213 03:49:59.646967 5070 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d9ad7e67-58c9-4cee-8154-dc5119e96687-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 13 03:49:59 crc kubenswrapper[5070]: I1213 03:49:59.748168 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3dc4fcae-76ad-430d-968e-9bb5e53d589e-inventory\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-vc52p\" (UID: \"3dc4fcae-76ad-430d-968e-9bb5e53d589e\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-vc52p" Dec 13 03:49:59 crc kubenswrapper[5070]: I1213 03:49:59.748611 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3dc4fcae-76ad-430d-968e-9bb5e53d589e-ssh-key\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-vc52p\" (UID: \"3dc4fcae-76ad-430d-968e-9bb5e53d589e\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-vc52p" Dec 13 03:49:59 crc kubenswrapper[5070]: I1213 03:49:59.748736 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/3dc4fcae-76ad-430d-968e-9bb5e53d589e-ceph\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-vc52p\" (UID: \"3dc4fcae-76ad-430d-968e-9bb5e53d589e\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-vc52p" Dec 13 03:49:59 crc kubenswrapper[5070]: I1213 03:49:59.748781 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-tm7jz\" (UniqueName: \"kubernetes.io/projected/3dc4fcae-76ad-430d-968e-9bb5e53d589e-kube-api-access-tm7jz\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-vc52p\" (UID: \"3dc4fcae-76ad-430d-968e-9bb5e53d589e\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-vc52p" Dec 13 03:49:59 crc kubenswrapper[5070]: I1213 03:49:59.754126 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/3dc4fcae-76ad-430d-968e-9bb5e53d589e-ceph\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-vc52p\" (UID: \"3dc4fcae-76ad-430d-968e-9bb5e53d589e\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-vc52p" Dec 13 03:49:59 crc kubenswrapper[5070]: I1213 03:49:59.754238 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3dc4fcae-76ad-430d-968e-9bb5e53d589e-inventory\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-vc52p\" (UID: \"3dc4fcae-76ad-430d-968e-9bb5e53d589e\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-vc52p" Dec 13 03:49:59 crc kubenswrapper[5070]: I1213 03:49:59.754942 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3dc4fcae-76ad-430d-968e-9bb5e53d589e-ssh-key\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-vc52p\" (UID: \"3dc4fcae-76ad-430d-968e-9bb5e53d589e\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-vc52p" Dec 13 03:49:59 crc kubenswrapper[5070]: I1213 03:49:59.765536 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tm7jz\" (UniqueName: \"kubernetes.io/projected/3dc4fcae-76ad-430d-968e-9bb5e53d589e-kube-api-access-tm7jz\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-vc52p\" (UID: \"3dc4fcae-76ad-430d-968e-9bb5e53d589e\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-vc52p" Dec 13 03:49:59 crc kubenswrapper[5070]: I1213 03:49:59.835641 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-vc52p" Dec 13 03:50:00 crc kubenswrapper[5070]: I1213 03:50:00.393003 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-vc52p"] Dec 13 03:50:00 crc kubenswrapper[5070]: I1213 03:50:00.436674 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-vc52p" event={"ID":"3dc4fcae-76ad-430d-968e-9bb5e53d589e","Type":"ContainerStarted","Data":"9dc88da5826fdf142d93087ba1315f7ea2194655b4781c30ed627ad4862dd589"} Dec 13 03:50:02 crc kubenswrapper[5070]: I1213 03:50:02.458423 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-vc52p" event={"ID":"3dc4fcae-76ad-430d-968e-9bb5e53d589e","Type":"ContainerStarted","Data":"3a69ba7f65bd81c41297819f2e31fdc3b1588e0da0b88e5534f85b7b71d8e612"} Dec 13 03:50:02 crc kubenswrapper[5070]: I1213 03:50:02.483703 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-vc52p" podStartSLOduration=2.651201198 podStartE2EDuration="3.483686125s" podCreationTimestamp="2025-12-13 03:49:59 +0000 UTC" firstStartedPulling="2025-12-13 03:50:00.391239316 +0000 UTC m=+2292.627082862" lastFinishedPulling="2025-12-13 03:50:01.223724243 +0000 UTC m=+2293.459567789" observedRunningTime="2025-12-13 03:50:02.477582608 +0000 UTC m=+2294.713426174" watchObservedRunningTime="2025-12-13 03:50:02.483686125 +0000 UTC m=+2294.719529671" Dec 13 03:50:05 crc kubenswrapper[5070]: I1213 03:50:05.487795 5070 generic.go:334] "Generic (PLEG): container finished" podID="3dc4fcae-76ad-430d-968e-9bb5e53d589e" containerID="3a69ba7f65bd81c41297819f2e31fdc3b1588e0da0b88e5534f85b7b71d8e612" exitCode=0 Dec 13 03:50:05 crc kubenswrapper[5070]: I1213 03:50:05.487891 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-vc52p" event={"ID":"3dc4fcae-76ad-430d-968e-9bb5e53d589e","Type":"ContainerDied","Data":"3a69ba7f65bd81c41297819f2e31fdc3b1588e0da0b88e5534f85b7b71d8e612"} Dec 13 03:50:06 crc kubenswrapper[5070]: I1213 03:50:06.894283 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-vc52p" Dec 13 03:50:07 crc kubenswrapper[5070]: I1213 03:50:07.080874 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3dc4fcae-76ad-430d-968e-9bb5e53d589e-inventory\") pod \"3dc4fcae-76ad-430d-968e-9bb5e53d589e\" (UID: \"3dc4fcae-76ad-430d-968e-9bb5e53d589e\") " Dec 13 03:50:07 crc kubenswrapper[5070]: I1213 03:50:07.080983 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tm7jz\" (UniqueName: \"kubernetes.io/projected/3dc4fcae-76ad-430d-968e-9bb5e53d589e-kube-api-access-tm7jz\") pod \"3dc4fcae-76ad-430d-968e-9bb5e53d589e\" (UID: \"3dc4fcae-76ad-430d-968e-9bb5e53d589e\") " Dec 13 03:50:07 crc kubenswrapper[5070]: I1213 03:50:07.081030 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3dc4fcae-76ad-430d-968e-9bb5e53d589e-ssh-key\") pod \"3dc4fcae-76ad-430d-968e-9bb5e53d589e\" (UID: \"3dc4fcae-76ad-430d-968e-9bb5e53d589e\") " Dec 13 03:50:07 crc kubenswrapper[5070]: I1213 03:50:07.082215 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/3dc4fcae-76ad-430d-968e-9bb5e53d589e-ceph\") pod \"3dc4fcae-76ad-430d-968e-9bb5e53d589e\" (UID: \"3dc4fcae-76ad-430d-968e-9bb5e53d589e\") " Dec 13 03:50:07 crc kubenswrapper[5070]: I1213 03:50:07.088185 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3dc4fcae-76ad-430d-968e-9bb5e53d589e-ceph" (OuterVolumeSpecName: "ceph") pod "3dc4fcae-76ad-430d-968e-9bb5e53d589e" (UID: "3dc4fcae-76ad-430d-968e-9bb5e53d589e"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:50:07 crc kubenswrapper[5070]: I1213 03:50:07.089472 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3dc4fcae-76ad-430d-968e-9bb5e53d589e-kube-api-access-tm7jz" (OuterVolumeSpecName: "kube-api-access-tm7jz") pod "3dc4fcae-76ad-430d-968e-9bb5e53d589e" (UID: "3dc4fcae-76ad-430d-968e-9bb5e53d589e"). InnerVolumeSpecName "kube-api-access-tm7jz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:50:07 crc kubenswrapper[5070]: I1213 03:50:07.124375 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3dc4fcae-76ad-430d-968e-9bb5e53d589e-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "3dc4fcae-76ad-430d-968e-9bb5e53d589e" (UID: "3dc4fcae-76ad-430d-968e-9bb5e53d589e"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:50:07 crc kubenswrapper[5070]: I1213 03:50:07.131250 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3dc4fcae-76ad-430d-968e-9bb5e53d589e-inventory" (OuterVolumeSpecName: "inventory") pod "3dc4fcae-76ad-430d-968e-9bb5e53d589e" (UID: "3dc4fcae-76ad-430d-968e-9bb5e53d589e"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:50:07 crc kubenswrapper[5070]: I1213 03:50:07.184348 5070 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3dc4fcae-76ad-430d-968e-9bb5e53d589e-inventory\") on node \"crc\" DevicePath \"\"" Dec 13 03:50:07 crc kubenswrapper[5070]: I1213 03:50:07.184384 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tm7jz\" (UniqueName: \"kubernetes.io/projected/3dc4fcae-76ad-430d-968e-9bb5e53d589e-kube-api-access-tm7jz\") on node \"crc\" DevicePath \"\"" Dec 13 03:50:07 crc kubenswrapper[5070]: I1213 03:50:07.184396 5070 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3dc4fcae-76ad-430d-968e-9bb5e53d589e-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 13 03:50:07 crc kubenswrapper[5070]: I1213 03:50:07.184405 5070 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/3dc4fcae-76ad-430d-968e-9bb5e53d589e-ceph\") on node \"crc\" DevicePath \"\"" Dec 13 03:50:07 crc kubenswrapper[5070]: I1213 03:50:07.508087 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-vc52p" event={"ID":"3dc4fcae-76ad-430d-968e-9bb5e53d589e","Type":"ContainerDied","Data":"9dc88da5826fdf142d93087ba1315f7ea2194655b4781c30ed627ad4862dd589"} Dec 13 03:50:07 crc kubenswrapper[5070]: I1213 03:50:07.508132 5070 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9dc88da5826fdf142d93087ba1315f7ea2194655b4781c30ed627ad4862dd589" Dec 13 03:50:07 crc kubenswrapper[5070]: I1213 03:50:07.508189 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-vc52p" Dec 13 03:50:07 crc kubenswrapper[5070]: I1213 03:50:07.577904 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-s9v7b"] Dec 13 03:50:07 crc kubenswrapper[5070]: E1213 03:50:07.578385 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3dc4fcae-76ad-430d-968e-9bb5e53d589e" containerName="ceph-hci-pre-edpm-deployment-openstack-edpm-ipam" Dec 13 03:50:07 crc kubenswrapper[5070]: I1213 03:50:07.578410 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="3dc4fcae-76ad-430d-968e-9bb5e53d589e" containerName="ceph-hci-pre-edpm-deployment-openstack-edpm-ipam" Dec 13 03:50:07 crc kubenswrapper[5070]: I1213 03:50:07.578694 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="3dc4fcae-76ad-430d-968e-9bb5e53d589e" containerName="ceph-hci-pre-edpm-deployment-openstack-edpm-ipam" Dec 13 03:50:07 crc kubenswrapper[5070]: I1213 03:50:07.579490 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-s9v7b" Dec 13 03:50:07 crc kubenswrapper[5070]: I1213 03:50:07.581690 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 13 03:50:07 crc kubenswrapper[5070]: I1213 03:50:07.582703 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 13 03:50:07 crc kubenswrapper[5070]: I1213 03:50:07.582943 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Dec 13 03:50:07 crc kubenswrapper[5070]: I1213 03:50:07.583120 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-gdkcc" Dec 13 03:50:07 crc kubenswrapper[5070]: I1213 03:50:07.583423 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 13 03:50:07 crc kubenswrapper[5070]: I1213 03:50:07.587774 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-s9v7b"] Dec 13 03:50:07 crc kubenswrapper[5070]: I1213 03:50:07.693712 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3c7f94e1-5650-48ce-bccd-827f0cb55d76-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-s9v7b\" (UID: \"3c7f94e1-5650-48ce-bccd-827f0cb55d76\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-s9v7b" Dec 13 03:50:07 crc kubenswrapper[5070]: I1213 03:50:07.693775 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3c7f94e1-5650-48ce-bccd-827f0cb55d76-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-s9v7b\" (UID: \"3c7f94e1-5650-48ce-bccd-827f0cb55d76\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-s9v7b" Dec 13 03:50:07 crc kubenswrapper[5070]: I1213 03:50:07.693836 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/3c7f94e1-5650-48ce-bccd-827f0cb55d76-ceph\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-s9v7b\" (UID: \"3c7f94e1-5650-48ce-bccd-827f0cb55d76\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-s9v7b" Dec 13 03:50:07 crc kubenswrapper[5070]: I1213 03:50:07.693886 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d2rmn\" (UniqueName: \"kubernetes.io/projected/3c7f94e1-5650-48ce-bccd-827f0cb55d76-kube-api-access-d2rmn\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-s9v7b\" (UID: \"3c7f94e1-5650-48ce-bccd-827f0cb55d76\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-s9v7b" Dec 13 03:50:07 crc kubenswrapper[5070]: I1213 03:50:07.795527 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3c7f94e1-5650-48ce-bccd-827f0cb55d76-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-s9v7b\" (UID: \"3c7f94e1-5650-48ce-bccd-827f0cb55d76\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-s9v7b" Dec 13 03:50:07 crc kubenswrapper[5070]: I1213 03:50:07.795589 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" 
(UniqueName: \"kubernetes.io/secret/3c7f94e1-5650-48ce-bccd-827f0cb55d76-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-s9v7b\" (UID: \"3c7f94e1-5650-48ce-bccd-827f0cb55d76\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-s9v7b" Dec 13 03:50:07 crc kubenswrapper[5070]: I1213 03:50:07.795626 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/3c7f94e1-5650-48ce-bccd-827f0cb55d76-ceph\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-s9v7b\" (UID: \"3c7f94e1-5650-48ce-bccd-827f0cb55d76\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-s9v7b" Dec 13 03:50:07 crc kubenswrapper[5070]: I1213 03:50:07.795671 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d2rmn\" (UniqueName: \"kubernetes.io/projected/3c7f94e1-5650-48ce-bccd-827f0cb55d76-kube-api-access-d2rmn\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-s9v7b\" (UID: \"3c7f94e1-5650-48ce-bccd-827f0cb55d76\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-s9v7b" Dec 13 03:50:07 crc kubenswrapper[5070]: I1213 03:50:07.800900 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/3c7f94e1-5650-48ce-bccd-827f0cb55d76-ceph\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-s9v7b\" (UID: \"3c7f94e1-5650-48ce-bccd-827f0cb55d76\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-s9v7b" Dec 13 03:50:07 crc kubenswrapper[5070]: I1213 03:50:07.801095 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3c7f94e1-5650-48ce-bccd-827f0cb55d76-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-s9v7b\" (UID: \"3c7f94e1-5650-48ce-bccd-827f0cb55d76\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-s9v7b" Dec 13 03:50:07 crc kubenswrapper[5070]: I1213 03:50:07.801151 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3c7f94e1-5650-48ce-bccd-827f0cb55d76-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-s9v7b\" (UID: \"3c7f94e1-5650-48ce-bccd-827f0cb55d76\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-s9v7b" Dec 13 03:50:07 crc kubenswrapper[5070]: I1213 03:50:07.814291 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d2rmn\" (UniqueName: \"kubernetes.io/projected/3c7f94e1-5650-48ce-bccd-827f0cb55d76-kube-api-access-d2rmn\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-s9v7b\" (UID: \"3c7f94e1-5650-48ce-bccd-827f0cb55d76\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-s9v7b" Dec 13 03:50:07 crc kubenswrapper[5070]: I1213 03:50:07.894982 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-s9v7b" Dec 13 03:50:08 crc kubenswrapper[5070]: I1213 03:50:08.453216 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-s9v7b"] Dec 13 03:50:08 crc kubenswrapper[5070]: I1213 03:50:08.518811 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-s9v7b" event={"ID":"3c7f94e1-5650-48ce-bccd-827f0cb55d76","Type":"ContainerStarted","Data":"38d9f56bffce73f0340c150ddb8aefb74bf50843aa8beab92bcf0d3024021283"} Dec 13 03:50:09 crc kubenswrapper[5070]: I1213 03:50:09.528526 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-s9v7b" event={"ID":"3c7f94e1-5650-48ce-bccd-827f0cb55d76","Type":"ContainerStarted","Data":"b605e12f68d030710106fa1f4a7bd1adea79c3110e7b8048f22bc4dc4f16ce27"} Dec 13 03:50:09 crc kubenswrapper[5070]: I1213 03:50:09.554393 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-s9v7b" podStartSLOduration=1.862166329 podStartE2EDuration="2.554344216s" podCreationTimestamp="2025-12-13 03:50:07 +0000 UTC" firstStartedPulling="2025-12-13 03:50:08.465599478 +0000 UTC m=+2300.701443014" lastFinishedPulling="2025-12-13 03:50:09.157777355 +0000 UTC m=+2301.393620901" observedRunningTime="2025-12-13 03:50:09.547501978 +0000 UTC m=+2301.783345534" watchObservedRunningTime="2025-12-13 03:50:09.554344216 +0000 UTC m=+2301.790187762" Dec 13 03:50:49 crc kubenswrapper[5070]: I1213 03:50:49.913409 5070 generic.go:334] "Generic (PLEG): container finished" podID="3c7f94e1-5650-48ce-bccd-827f0cb55d76" containerID="b605e12f68d030710106fa1f4a7bd1adea79c3110e7b8048f22bc4dc4f16ce27" exitCode=0 Dec 13 03:50:49 crc kubenswrapper[5070]: I1213 03:50:49.913830 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-s9v7b" event={"ID":"3c7f94e1-5650-48ce-bccd-827f0cb55d76","Type":"ContainerDied","Data":"b605e12f68d030710106fa1f4a7bd1adea79c3110e7b8048f22bc4dc4f16ce27"} Dec 13 03:50:51 crc kubenswrapper[5070]: I1213 03:50:51.276290 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-s9v7b" Dec 13 03:50:51 crc kubenswrapper[5070]: I1213 03:50:51.419231 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3c7f94e1-5650-48ce-bccd-827f0cb55d76-ssh-key\") pod \"3c7f94e1-5650-48ce-bccd-827f0cb55d76\" (UID: \"3c7f94e1-5650-48ce-bccd-827f0cb55d76\") " Dec 13 03:50:51 crc kubenswrapper[5070]: I1213 03:50:51.419274 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/3c7f94e1-5650-48ce-bccd-827f0cb55d76-ceph\") pod \"3c7f94e1-5650-48ce-bccd-827f0cb55d76\" (UID: \"3c7f94e1-5650-48ce-bccd-827f0cb55d76\") " Dec 13 03:50:51 crc kubenswrapper[5070]: I1213 03:50:51.419348 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d2rmn\" (UniqueName: \"kubernetes.io/projected/3c7f94e1-5650-48ce-bccd-827f0cb55d76-kube-api-access-d2rmn\") pod \"3c7f94e1-5650-48ce-bccd-827f0cb55d76\" (UID: \"3c7f94e1-5650-48ce-bccd-827f0cb55d76\") " Dec 13 03:50:51 crc kubenswrapper[5070]: I1213 03:50:51.419566 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3c7f94e1-5650-48ce-bccd-827f0cb55d76-inventory\") pod \"3c7f94e1-5650-48ce-bccd-827f0cb55d76\" (UID: \"3c7f94e1-5650-48ce-bccd-827f0cb55d76\") " Dec 13 03:50:51 crc kubenswrapper[5070]: I1213 03:50:51.425338 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3c7f94e1-5650-48ce-bccd-827f0cb55d76-kube-api-access-d2rmn" (OuterVolumeSpecName: "kube-api-access-d2rmn") pod "3c7f94e1-5650-48ce-bccd-827f0cb55d76" (UID: "3c7f94e1-5650-48ce-bccd-827f0cb55d76"). InnerVolumeSpecName "kube-api-access-d2rmn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:50:51 crc kubenswrapper[5070]: I1213 03:50:51.429308 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3c7f94e1-5650-48ce-bccd-827f0cb55d76-ceph" (OuterVolumeSpecName: "ceph") pod "3c7f94e1-5650-48ce-bccd-827f0cb55d76" (UID: "3c7f94e1-5650-48ce-bccd-827f0cb55d76"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:50:51 crc kubenswrapper[5070]: I1213 03:50:51.448403 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3c7f94e1-5650-48ce-bccd-827f0cb55d76-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "3c7f94e1-5650-48ce-bccd-827f0cb55d76" (UID: "3c7f94e1-5650-48ce-bccd-827f0cb55d76"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:50:51 crc kubenswrapper[5070]: I1213 03:50:51.454740 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3c7f94e1-5650-48ce-bccd-827f0cb55d76-inventory" (OuterVolumeSpecName: "inventory") pod "3c7f94e1-5650-48ce-bccd-827f0cb55d76" (UID: "3c7f94e1-5650-48ce-bccd-827f0cb55d76"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:50:51 crc kubenswrapper[5070]: I1213 03:50:51.521966 5070 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3c7f94e1-5650-48ce-bccd-827f0cb55d76-inventory\") on node \"crc\" DevicePath \"\"" Dec 13 03:50:51 crc kubenswrapper[5070]: I1213 03:50:51.522007 5070 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3c7f94e1-5650-48ce-bccd-827f0cb55d76-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 13 03:50:51 crc kubenswrapper[5070]: I1213 03:50:51.522015 5070 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/3c7f94e1-5650-48ce-bccd-827f0cb55d76-ceph\") on node \"crc\" DevicePath \"\"" Dec 13 03:50:51 crc kubenswrapper[5070]: I1213 03:50:51.522024 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d2rmn\" (UniqueName: \"kubernetes.io/projected/3c7f94e1-5650-48ce-bccd-827f0cb55d76-kube-api-access-d2rmn\") on node \"crc\" DevicePath \"\"" Dec 13 03:50:51 crc kubenswrapper[5070]: I1213 03:50:51.932612 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-s9v7b" event={"ID":"3c7f94e1-5650-48ce-bccd-827f0cb55d76","Type":"ContainerDied","Data":"38d9f56bffce73f0340c150ddb8aefb74bf50843aa8beab92bcf0d3024021283"} Dec 13 03:50:51 crc kubenswrapper[5070]: I1213 03:50:51.932938 5070 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="38d9f56bffce73f0340c150ddb8aefb74bf50843aa8beab92bcf0d3024021283" Dec 13 03:50:51 crc kubenswrapper[5070]: I1213 03:50:51.932687 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-s9v7b" Dec 13 03:50:52 crc kubenswrapper[5070]: I1213 03:50:52.034131 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-c9pf8"] Dec 13 03:50:52 crc kubenswrapper[5070]: E1213 03:50:52.034638 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3c7f94e1-5650-48ce-bccd-827f0cb55d76" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Dec 13 03:50:52 crc kubenswrapper[5070]: I1213 03:50:52.034664 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="3c7f94e1-5650-48ce-bccd-827f0cb55d76" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Dec 13 03:50:52 crc kubenswrapper[5070]: I1213 03:50:52.034892 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="3c7f94e1-5650-48ce-bccd-827f0cb55d76" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Dec 13 03:50:52 crc kubenswrapper[5070]: I1213 03:50:52.035675 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-c9pf8" Dec 13 03:50:52 crc kubenswrapper[5070]: I1213 03:50:52.039720 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Dec 13 03:50:52 crc kubenswrapper[5070]: I1213 03:50:52.039839 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 13 03:50:52 crc kubenswrapper[5070]: I1213 03:50:52.040012 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 13 03:50:52 crc kubenswrapper[5070]: I1213 03:50:52.040105 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-gdkcc" Dec 13 03:50:52 crc kubenswrapper[5070]: I1213 03:50:52.044503 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 13 03:50:52 crc kubenswrapper[5070]: I1213 03:50:52.048274 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-c9pf8"] Dec 13 03:50:52 crc kubenswrapper[5070]: I1213 03:50:52.131547 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/bfbab1e3-812a-4942-bc27-14e6c8754264-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-c9pf8\" (UID: \"bfbab1e3-812a-4942-bc27-14e6c8754264\") " pod="openstack/ssh-known-hosts-edpm-deployment-c9pf8" Dec 13 03:50:52 crc kubenswrapper[5070]: I1213 03:50:52.131597 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-25x84\" (UniqueName: \"kubernetes.io/projected/bfbab1e3-812a-4942-bc27-14e6c8754264-kube-api-access-25x84\") pod \"ssh-known-hosts-edpm-deployment-c9pf8\" (UID: \"bfbab1e3-812a-4942-bc27-14e6c8754264\") " pod="openstack/ssh-known-hosts-edpm-deployment-c9pf8" Dec 13 03:50:52 crc kubenswrapper[5070]: I1213 03:50:52.131644 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/bfbab1e3-812a-4942-bc27-14e6c8754264-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-c9pf8\" (UID: \"bfbab1e3-812a-4942-bc27-14e6c8754264\") " pod="openstack/ssh-known-hosts-edpm-deployment-c9pf8" Dec 13 03:50:52 crc kubenswrapper[5070]: I1213 03:50:52.131674 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/bfbab1e3-812a-4942-bc27-14e6c8754264-ceph\") pod \"ssh-known-hosts-edpm-deployment-c9pf8\" (UID: \"bfbab1e3-812a-4942-bc27-14e6c8754264\") " pod="openstack/ssh-known-hosts-edpm-deployment-c9pf8" Dec 13 03:50:52 crc kubenswrapper[5070]: I1213 03:50:52.233675 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-25x84\" (UniqueName: \"kubernetes.io/projected/bfbab1e3-812a-4942-bc27-14e6c8754264-kube-api-access-25x84\") pod \"ssh-known-hosts-edpm-deployment-c9pf8\" (UID: \"bfbab1e3-812a-4942-bc27-14e6c8754264\") " pod="openstack/ssh-known-hosts-edpm-deployment-c9pf8" Dec 13 03:50:52 crc kubenswrapper[5070]: I1213 03:50:52.233807 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/bfbab1e3-812a-4942-bc27-14e6c8754264-ssh-key-openstack-edpm-ipam\") pod 
\"ssh-known-hosts-edpm-deployment-c9pf8\" (UID: \"bfbab1e3-812a-4942-bc27-14e6c8754264\") " pod="openstack/ssh-known-hosts-edpm-deployment-c9pf8" Dec 13 03:50:52 crc kubenswrapper[5070]: I1213 03:50:52.233868 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/bfbab1e3-812a-4942-bc27-14e6c8754264-ceph\") pod \"ssh-known-hosts-edpm-deployment-c9pf8\" (UID: \"bfbab1e3-812a-4942-bc27-14e6c8754264\") " pod="openstack/ssh-known-hosts-edpm-deployment-c9pf8" Dec 13 03:50:52 crc kubenswrapper[5070]: I1213 03:50:52.234111 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/bfbab1e3-812a-4942-bc27-14e6c8754264-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-c9pf8\" (UID: \"bfbab1e3-812a-4942-bc27-14e6c8754264\") " pod="openstack/ssh-known-hosts-edpm-deployment-c9pf8" Dec 13 03:50:52 crc kubenswrapper[5070]: I1213 03:50:52.238398 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/bfbab1e3-812a-4942-bc27-14e6c8754264-ceph\") pod \"ssh-known-hosts-edpm-deployment-c9pf8\" (UID: \"bfbab1e3-812a-4942-bc27-14e6c8754264\") " pod="openstack/ssh-known-hosts-edpm-deployment-c9pf8" Dec 13 03:50:52 crc kubenswrapper[5070]: I1213 03:50:52.238419 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/bfbab1e3-812a-4942-bc27-14e6c8754264-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-c9pf8\" (UID: \"bfbab1e3-812a-4942-bc27-14e6c8754264\") " pod="openstack/ssh-known-hosts-edpm-deployment-c9pf8" Dec 13 03:50:52 crc kubenswrapper[5070]: I1213 03:50:52.238736 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/bfbab1e3-812a-4942-bc27-14e6c8754264-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-c9pf8\" (UID: \"bfbab1e3-812a-4942-bc27-14e6c8754264\") " pod="openstack/ssh-known-hosts-edpm-deployment-c9pf8" Dec 13 03:50:52 crc kubenswrapper[5070]: I1213 03:50:52.252631 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-25x84\" (UniqueName: \"kubernetes.io/projected/bfbab1e3-812a-4942-bc27-14e6c8754264-kube-api-access-25x84\") pod \"ssh-known-hosts-edpm-deployment-c9pf8\" (UID: \"bfbab1e3-812a-4942-bc27-14e6c8754264\") " pod="openstack/ssh-known-hosts-edpm-deployment-c9pf8" Dec 13 03:50:52 crc kubenswrapper[5070]: I1213 03:50:52.354613 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-c9pf8" Dec 13 03:50:52 crc kubenswrapper[5070]: I1213 03:50:52.850725 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-c9pf8"] Dec 13 03:50:52 crc kubenswrapper[5070]: I1213 03:50:52.941758 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-c9pf8" event={"ID":"bfbab1e3-812a-4942-bc27-14e6c8754264","Type":"ContainerStarted","Data":"b626fe8f8c70d53ecd5d0a7d6bef89af74047ea050466cf5c8a60daaddfa2edd"} Dec 13 03:50:53 crc kubenswrapper[5070]: I1213 03:50:53.954274 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-c9pf8" event={"ID":"bfbab1e3-812a-4942-bc27-14e6c8754264","Type":"ContainerStarted","Data":"423b14da678b3b0303582f3cfb9e977d58517decb26a36d3fdf4295b309fe6a8"} Dec 13 03:50:53 crc kubenswrapper[5070]: I1213 03:50:53.979778 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ssh-known-hosts-edpm-deployment-c9pf8" podStartSLOduration=1.4522626779999999 podStartE2EDuration="1.97974768s" podCreationTimestamp="2025-12-13 03:50:52 +0000 UTC" firstStartedPulling="2025-12-13 03:50:52.857699901 +0000 UTC m=+2345.093543447" lastFinishedPulling="2025-12-13 03:50:53.385184903 +0000 UTC m=+2345.621028449" observedRunningTime="2025-12-13 03:50:53.975488524 +0000 UTC m=+2346.211332070" watchObservedRunningTime="2025-12-13 03:50:53.97974768 +0000 UTC m=+2346.215591266" Dec 13 03:51:02 crc kubenswrapper[5070]: I1213 03:51:02.021155 5070 generic.go:334] "Generic (PLEG): container finished" podID="bfbab1e3-812a-4942-bc27-14e6c8754264" containerID="423b14da678b3b0303582f3cfb9e977d58517decb26a36d3fdf4295b309fe6a8" exitCode=0 Dec 13 03:51:02 crc kubenswrapper[5070]: I1213 03:51:02.021264 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-c9pf8" event={"ID":"bfbab1e3-812a-4942-bc27-14e6c8754264","Type":"ContainerDied","Data":"423b14da678b3b0303582f3cfb9e977d58517decb26a36d3fdf4295b309fe6a8"} Dec 13 03:51:03 crc kubenswrapper[5070]: I1213 03:51:03.409458 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-c9pf8" Dec 13 03:51:03 crc kubenswrapper[5070]: I1213 03:51:03.548902 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/bfbab1e3-812a-4942-bc27-14e6c8754264-ceph\") pod \"bfbab1e3-812a-4942-bc27-14e6c8754264\" (UID: \"bfbab1e3-812a-4942-bc27-14e6c8754264\") " Dec 13 03:51:03 crc kubenswrapper[5070]: I1213 03:51:03.549280 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/bfbab1e3-812a-4942-bc27-14e6c8754264-inventory-0\") pod \"bfbab1e3-812a-4942-bc27-14e6c8754264\" (UID: \"bfbab1e3-812a-4942-bc27-14e6c8754264\") " Dec 13 03:51:03 crc kubenswrapper[5070]: I1213 03:51:03.549495 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-25x84\" (UniqueName: \"kubernetes.io/projected/bfbab1e3-812a-4942-bc27-14e6c8754264-kube-api-access-25x84\") pod \"bfbab1e3-812a-4942-bc27-14e6c8754264\" (UID: \"bfbab1e3-812a-4942-bc27-14e6c8754264\") " Dec 13 03:51:03 crc kubenswrapper[5070]: I1213 03:51:03.549562 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/bfbab1e3-812a-4942-bc27-14e6c8754264-ssh-key-openstack-edpm-ipam\") pod \"bfbab1e3-812a-4942-bc27-14e6c8754264\" (UID: \"bfbab1e3-812a-4942-bc27-14e6c8754264\") " Dec 13 03:51:03 crc kubenswrapper[5070]: I1213 03:51:03.554337 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bfbab1e3-812a-4942-bc27-14e6c8754264-ceph" (OuterVolumeSpecName: "ceph") pod "bfbab1e3-812a-4942-bc27-14e6c8754264" (UID: "bfbab1e3-812a-4942-bc27-14e6c8754264"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:51:03 crc kubenswrapper[5070]: I1213 03:51:03.554547 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bfbab1e3-812a-4942-bc27-14e6c8754264-kube-api-access-25x84" (OuterVolumeSpecName: "kube-api-access-25x84") pod "bfbab1e3-812a-4942-bc27-14e6c8754264" (UID: "bfbab1e3-812a-4942-bc27-14e6c8754264"). InnerVolumeSpecName "kube-api-access-25x84". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:51:03 crc kubenswrapper[5070]: I1213 03:51:03.575691 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bfbab1e3-812a-4942-bc27-14e6c8754264-inventory-0" (OuterVolumeSpecName: "inventory-0") pod "bfbab1e3-812a-4942-bc27-14e6c8754264" (UID: "bfbab1e3-812a-4942-bc27-14e6c8754264"). InnerVolumeSpecName "inventory-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:51:03 crc kubenswrapper[5070]: I1213 03:51:03.576538 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bfbab1e3-812a-4942-bc27-14e6c8754264-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "bfbab1e3-812a-4942-bc27-14e6c8754264" (UID: "bfbab1e3-812a-4942-bc27-14e6c8754264"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:51:03 crc kubenswrapper[5070]: I1213 03:51:03.651736 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-25x84\" (UniqueName: \"kubernetes.io/projected/bfbab1e3-812a-4942-bc27-14e6c8754264-kube-api-access-25x84\") on node \"crc\" DevicePath \"\"" Dec 13 03:51:03 crc kubenswrapper[5070]: I1213 03:51:03.651783 5070 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/bfbab1e3-812a-4942-bc27-14e6c8754264-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Dec 13 03:51:03 crc kubenswrapper[5070]: I1213 03:51:03.651797 5070 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/bfbab1e3-812a-4942-bc27-14e6c8754264-ceph\") on node \"crc\" DevicePath \"\"" Dec 13 03:51:03 crc kubenswrapper[5070]: I1213 03:51:03.651811 5070 reconciler_common.go:293] "Volume detached for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/bfbab1e3-812a-4942-bc27-14e6c8754264-inventory-0\") on node \"crc\" DevicePath \"\"" Dec 13 03:51:04 crc kubenswrapper[5070]: I1213 03:51:04.043431 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-c9pf8" event={"ID":"bfbab1e3-812a-4942-bc27-14e6c8754264","Type":"ContainerDied","Data":"b626fe8f8c70d53ecd5d0a7d6bef89af74047ea050466cf5c8a60daaddfa2edd"} Dec 13 03:51:04 crc kubenswrapper[5070]: I1213 03:51:04.043620 5070 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b626fe8f8c70d53ecd5d0a7d6bef89af74047ea050466cf5c8a60daaddfa2edd" Dec 13 03:51:04 crc kubenswrapper[5070]: I1213 03:51:04.043671 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-c9pf8" Dec 13 03:51:04 crc kubenswrapper[5070]: I1213 03:51:04.129562 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-vdj8w"] Dec 13 03:51:04 crc kubenswrapper[5070]: E1213 03:51:04.130088 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bfbab1e3-812a-4942-bc27-14e6c8754264" containerName="ssh-known-hosts-edpm-deployment" Dec 13 03:51:04 crc kubenswrapper[5070]: I1213 03:51:04.130117 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="bfbab1e3-812a-4942-bc27-14e6c8754264" containerName="ssh-known-hosts-edpm-deployment" Dec 13 03:51:04 crc kubenswrapper[5070]: I1213 03:51:04.130381 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="bfbab1e3-812a-4942-bc27-14e6c8754264" containerName="ssh-known-hosts-edpm-deployment" Dec 13 03:51:04 crc kubenswrapper[5070]: I1213 03:51:04.131668 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-vdj8w" Dec 13 03:51:04 crc kubenswrapper[5070]: I1213 03:51:04.134271 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Dec 13 03:51:04 crc kubenswrapper[5070]: I1213 03:51:04.134296 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 13 03:51:04 crc kubenswrapper[5070]: I1213 03:51:04.134353 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-gdkcc" Dec 13 03:51:04 crc kubenswrapper[5070]: I1213 03:51:04.134432 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 13 03:51:04 crc kubenswrapper[5070]: I1213 03:51:04.137522 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 13 03:51:04 crc kubenswrapper[5070]: I1213 03:51:04.142082 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-vdj8w"] Dec 13 03:51:04 crc kubenswrapper[5070]: I1213 03:51:04.263054 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/a3440a4a-ac95-4afa-b10a-f95c600e4dcc-ceph\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-vdj8w\" (UID: \"a3440a4a-ac95-4afa-b10a-f95c600e4dcc\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-vdj8w" Dec 13 03:51:04 crc kubenswrapper[5070]: I1213 03:51:04.263220 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a3440a4a-ac95-4afa-b10a-f95c600e4dcc-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-vdj8w\" (UID: \"a3440a4a-ac95-4afa-b10a-f95c600e4dcc\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-vdj8w" Dec 13 03:51:04 crc kubenswrapper[5070]: I1213 03:51:04.263269 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a3440a4a-ac95-4afa-b10a-f95c600e4dcc-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-vdj8w\" (UID: \"a3440a4a-ac95-4afa-b10a-f95c600e4dcc\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-vdj8w" Dec 13 03:51:04 crc kubenswrapper[5070]: I1213 03:51:04.263463 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8bd2t\" (UniqueName: \"kubernetes.io/projected/a3440a4a-ac95-4afa-b10a-f95c600e4dcc-kube-api-access-8bd2t\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-vdj8w\" (UID: \"a3440a4a-ac95-4afa-b10a-f95c600e4dcc\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-vdj8w" Dec 13 03:51:04 crc kubenswrapper[5070]: I1213 03:51:04.365718 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/a3440a4a-ac95-4afa-b10a-f95c600e4dcc-ceph\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-vdj8w\" (UID: \"a3440a4a-ac95-4afa-b10a-f95c600e4dcc\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-vdj8w" Dec 13 03:51:04 crc kubenswrapper[5070]: I1213 03:51:04.366058 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a3440a4a-ac95-4afa-b10a-f95c600e4dcc-ssh-key\") 
pod \"run-os-edpm-deployment-openstack-edpm-ipam-vdj8w\" (UID: \"a3440a4a-ac95-4afa-b10a-f95c600e4dcc\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-vdj8w" Dec 13 03:51:04 crc kubenswrapper[5070]: I1213 03:51:04.366176 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a3440a4a-ac95-4afa-b10a-f95c600e4dcc-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-vdj8w\" (UID: \"a3440a4a-ac95-4afa-b10a-f95c600e4dcc\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-vdj8w" Dec 13 03:51:04 crc kubenswrapper[5070]: I1213 03:51:04.366409 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8bd2t\" (UniqueName: \"kubernetes.io/projected/a3440a4a-ac95-4afa-b10a-f95c600e4dcc-kube-api-access-8bd2t\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-vdj8w\" (UID: \"a3440a4a-ac95-4afa-b10a-f95c600e4dcc\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-vdj8w" Dec 13 03:51:04 crc kubenswrapper[5070]: I1213 03:51:04.369066 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a3440a4a-ac95-4afa-b10a-f95c600e4dcc-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-vdj8w\" (UID: \"a3440a4a-ac95-4afa-b10a-f95c600e4dcc\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-vdj8w" Dec 13 03:51:04 crc kubenswrapper[5070]: I1213 03:51:04.369072 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a3440a4a-ac95-4afa-b10a-f95c600e4dcc-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-vdj8w\" (UID: \"a3440a4a-ac95-4afa-b10a-f95c600e4dcc\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-vdj8w" Dec 13 03:51:04 crc kubenswrapper[5070]: I1213 03:51:04.369148 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/a3440a4a-ac95-4afa-b10a-f95c600e4dcc-ceph\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-vdj8w\" (UID: \"a3440a4a-ac95-4afa-b10a-f95c600e4dcc\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-vdj8w" Dec 13 03:51:04 crc kubenswrapper[5070]: I1213 03:51:04.387754 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8bd2t\" (UniqueName: \"kubernetes.io/projected/a3440a4a-ac95-4afa-b10a-f95c600e4dcc-kube-api-access-8bd2t\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-vdj8w\" (UID: \"a3440a4a-ac95-4afa-b10a-f95c600e4dcc\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-vdj8w" Dec 13 03:51:04 crc kubenswrapper[5070]: I1213 03:51:04.447130 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-vdj8w" Dec 13 03:51:04 crc kubenswrapper[5070]: I1213 03:51:04.927587 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-vdj8w"] Dec 13 03:51:05 crc kubenswrapper[5070]: I1213 03:51:05.052228 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-vdj8w" event={"ID":"a3440a4a-ac95-4afa-b10a-f95c600e4dcc","Type":"ContainerStarted","Data":"465a77297cef3200bee47979ba27c90ad80c30658406672554155adfc6d0f79c"} Dec 13 03:51:06 crc kubenswrapper[5070]: I1213 03:51:06.061191 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-vdj8w" event={"ID":"a3440a4a-ac95-4afa-b10a-f95c600e4dcc","Type":"ContainerStarted","Data":"03576561314ea048a8399cb27cd623c2287415e4804aadb015e6845380c182dd"} Dec 13 03:51:13 crc kubenswrapper[5070]: I1213 03:51:13.118272 5070 generic.go:334] "Generic (PLEG): container finished" podID="a3440a4a-ac95-4afa-b10a-f95c600e4dcc" containerID="03576561314ea048a8399cb27cd623c2287415e4804aadb015e6845380c182dd" exitCode=0 Dec 13 03:51:13 crc kubenswrapper[5070]: I1213 03:51:13.118363 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-vdj8w" event={"ID":"a3440a4a-ac95-4afa-b10a-f95c600e4dcc","Type":"ContainerDied","Data":"03576561314ea048a8399cb27cd623c2287415e4804aadb015e6845380c182dd"} Dec 13 03:51:14 crc kubenswrapper[5070]: I1213 03:51:14.562702 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-vdj8w" Dec 13 03:51:14 crc kubenswrapper[5070]: I1213 03:51:14.653497 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/a3440a4a-ac95-4afa-b10a-f95c600e4dcc-ceph\") pod \"a3440a4a-ac95-4afa-b10a-f95c600e4dcc\" (UID: \"a3440a4a-ac95-4afa-b10a-f95c600e4dcc\") " Dec 13 03:51:14 crc kubenswrapper[5070]: I1213 03:51:14.653894 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a3440a4a-ac95-4afa-b10a-f95c600e4dcc-inventory\") pod \"a3440a4a-ac95-4afa-b10a-f95c600e4dcc\" (UID: \"a3440a4a-ac95-4afa-b10a-f95c600e4dcc\") " Dec 13 03:51:14 crc kubenswrapper[5070]: I1213 03:51:14.653941 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a3440a4a-ac95-4afa-b10a-f95c600e4dcc-ssh-key\") pod \"a3440a4a-ac95-4afa-b10a-f95c600e4dcc\" (UID: \"a3440a4a-ac95-4afa-b10a-f95c600e4dcc\") " Dec 13 03:51:14 crc kubenswrapper[5070]: I1213 03:51:14.653999 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8bd2t\" (UniqueName: \"kubernetes.io/projected/a3440a4a-ac95-4afa-b10a-f95c600e4dcc-kube-api-access-8bd2t\") pod \"a3440a4a-ac95-4afa-b10a-f95c600e4dcc\" (UID: \"a3440a4a-ac95-4afa-b10a-f95c600e4dcc\") " Dec 13 03:51:14 crc kubenswrapper[5070]: I1213 03:51:14.659478 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a3440a4a-ac95-4afa-b10a-f95c600e4dcc-kube-api-access-8bd2t" (OuterVolumeSpecName: "kube-api-access-8bd2t") pod "a3440a4a-ac95-4afa-b10a-f95c600e4dcc" (UID: "a3440a4a-ac95-4afa-b10a-f95c600e4dcc"). InnerVolumeSpecName "kube-api-access-8bd2t". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:51:14 crc kubenswrapper[5070]: I1213 03:51:14.659732 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a3440a4a-ac95-4afa-b10a-f95c600e4dcc-ceph" (OuterVolumeSpecName: "ceph") pod "a3440a4a-ac95-4afa-b10a-f95c600e4dcc" (UID: "a3440a4a-ac95-4afa-b10a-f95c600e4dcc"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:51:14 crc kubenswrapper[5070]: I1213 03:51:14.680697 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a3440a4a-ac95-4afa-b10a-f95c600e4dcc-inventory" (OuterVolumeSpecName: "inventory") pod "a3440a4a-ac95-4afa-b10a-f95c600e4dcc" (UID: "a3440a4a-ac95-4afa-b10a-f95c600e4dcc"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:51:14 crc kubenswrapper[5070]: I1213 03:51:14.683958 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a3440a4a-ac95-4afa-b10a-f95c600e4dcc-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "a3440a4a-ac95-4afa-b10a-f95c600e4dcc" (UID: "a3440a4a-ac95-4afa-b10a-f95c600e4dcc"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:51:14 crc kubenswrapper[5070]: I1213 03:51:14.756984 5070 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/a3440a4a-ac95-4afa-b10a-f95c600e4dcc-ceph\") on node \"crc\" DevicePath \"\"" Dec 13 03:51:14 crc kubenswrapper[5070]: I1213 03:51:14.757046 5070 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a3440a4a-ac95-4afa-b10a-f95c600e4dcc-inventory\") on node \"crc\" DevicePath \"\"" Dec 13 03:51:14 crc kubenswrapper[5070]: I1213 03:51:14.757061 5070 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a3440a4a-ac95-4afa-b10a-f95c600e4dcc-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 13 03:51:14 crc kubenswrapper[5070]: I1213 03:51:14.757078 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8bd2t\" (UniqueName: \"kubernetes.io/projected/a3440a4a-ac95-4afa-b10a-f95c600e4dcc-kube-api-access-8bd2t\") on node \"crc\" DevicePath \"\"" Dec 13 03:51:15 crc kubenswrapper[5070]: I1213 03:51:15.135686 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-vdj8w" event={"ID":"a3440a4a-ac95-4afa-b10a-f95c600e4dcc","Type":"ContainerDied","Data":"465a77297cef3200bee47979ba27c90ad80c30658406672554155adfc6d0f79c"} Dec 13 03:51:15 crc kubenswrapper[5070]: I1213 03:51:15.135733 5070 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="465a77297cef3200bee47979ba27c90ad80c30658406672554155adfc6d0f79c" Dec 13 03:51:15 crc kubenswrapper[5070]: I1213 03:51:15.135743 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-vdj8w" Dec 13 03:51:15 crc kubenswrapper[5070]: I1213 03:51:15.213617 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-psxtf"] Dec 13 03:51:15 crc kubenswrapper[5070]: E1213 03:51:15.214007 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a3440a4a-ac95-4afa-b10a-f95c600e4dcc" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Dec 13 03:51:15 crc kubenswrapper[5070]: I1213 03:51:15.214022 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="a3440a4a-ac95-4afa-b10a-f95c600e4dcc" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Dec 13 03:51:15 crc kubenswrapper[5070]: I1213 03:51:15.214193 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="a3440a4a-ac95-4afa-b10a-f95c600e4dcc" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Dec 13 03:51:15 crc kubenswrapper[5070]: I1213 03:51:15.214797 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-psxtf" Dec 13 03:51:15 crc kubenswrapper[5070]: I1213 03:51:15.222098 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 13 03:51:15 crc kubenswrapper[5070]: I1213 03:51:15.222320 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 13 03:51:15 crc kubenswrapper[5070]: I1213 03:51:15.222497 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-gdkcc" Dec 13 03:51:15 crc kubenswrapper[5070]: I1213 03:51:15.222693 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Dec 13 03:51:15 crc kubenswrapper[5070]: I1213 03:51:15.222824 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 13 03:51:15 crc kubenswrapper[5070]: I1213 03:51:15.226047 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-psxtf"] Dec 13 03:51:15 crc kubenswrapper[5070]: I1213 03:51:15.272697 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/82cfb89d-1206-4fa9-881d-c8ff899d9ee8-ceph\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-psxtf\" (UID: \"82cfb89d-1206-4fa9-881d-c8ff899d9ee8\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-psxtf" Dec 13 03:51:15 crc kubenswrapper[5070]: I1213 03:51:15.272768 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/82cfb89d-1206-4fa9-881d-c8ff899d9ee8-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-psxtf\" (UID: \"82cfb89d-1206-4fa9-881d-c8ff899d9ee8\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-psxtf" Dec 13 03:51:15 crc kubenswrapper[5070]: I1213 03:51:15.272831 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qltf8\" (UniqueName: \"kubernetes.io/projected/82cfb89d-1206-4fa9-881d-c8ff899d9ee8-kube-api-access-qltf8\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-psxtf\" (UID: \"82cfb89d-1206-4fa9-881d-c8ff899d9ee8\") " 
pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-psxtf" Dec 13 03:51:15 crc kubenswrapper[5070]: I1213 03:51:15.272853 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/82cfb89d-1206-4fa9-881d-c8ff899d9ee8-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-psxtf\" (UID: \"82cfb89d-1206-4fa9-881d-c8ff899d9ee8\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-psxtf" Dec 13 03:51:15 crc kubenswrapper[5070]: I1213 03:51:15.375697 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/82cfb89d-1206-4fa9-881d-c8ff899d9ee8-ceph\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-psxtf\" (UID: \"82cfb89d-1206-4fa9-881d-c8ff899d9ee8\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-psxtf" Dec 13 03:51:15 crc kubenswrapper[5070]: I1213 03:51:15.375752 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/82cfb89d-1206-4fa9-881d-c8ff899d9ee8-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-psxtf\" (UID: \"82cfb89d-1206-4fa9-881d-c8ff899d9ee8\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-psxtf" Dec 13 03:51:15 crc kubenswrapper[5070]: I1213 03:51:15.375805 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qltf8\" (UniqueName: \"kubernetes.io/projected/82cfb89d-1206-4fa9-881d-c8ff899d9ee8-kube-api-access-qltf8\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-psxtf\" (UID: \"82cfb89d-1206-4fa9-881d-c8ff899d9ee8\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-psxtf" Dec 13 03:51:15 crc kubenswrapper[5070]: I1213 03:51:15.375826 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/82cfb89d-1206-4fa9-881d-c8ff899d9ee8-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-psxtf\" (UID: \"82cfb89d-1206-4fa9-881d-c8ff899d9ee8\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-psxtf" Dec 13 03:51:15 crc kubenswrapper[5070]: I1213 03:51:15.379401 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/82cfb89d-1206-4fa9-881d-c8ff899d9ee8-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-psxtf\" (UID: \"82cfb89d-1206-4fa9-881d-c8ff899d9ee8\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-psxtf" Dec 13 03:51:15 crc kubenswrapper[5070]: I1213 03:51:15.379609 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/82cfb89d-1206-4fa9-881d-c8ff899d9ee8-ceph\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-psxtf\" (UID: \"82cfb89d-1206-4fa9-881d-c8ff899d9ee8\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-psxtf" Dec 13 03:51:15 crc kubenswrapper[5070]: I1213 03:51:15.380122 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/82cfb89d-1206-4fa9-881d-c8ff899d9ee8-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-psxtf\" (UID: \"82cfb89d-1206-4fa9-881d-c8ff899d9ee8\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-psxtf" Dec 13 03:51:15 crc kubenswrapper[5070]: I1213 03:51:15.392612 5070 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-qltf8\" (UniqueName: \"kubernetes.io/projected/82cfb89d-1206-4fa9-881d-c8ff899d9ee8-kube-api-access-qltf8\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-psxtf\" (UID: \"82cfb89d-1206-4fa9-881d-c8ff899d9ee8\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-psxtf" Dec 13 03:51:15 crc kubenswrapper[5070]: I1213 03:51:15.538336 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-psxtf" Dec 13 03:51:16 crc kubenswrapper[5070]: I1213 03:51:16.080374 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-psxtf"] Dec 13 03:51:16 crc kubenswrapper[5070]: I1213 03:51:16.144870 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-psxtf" event={"ID":"82cfb89d-1206-4fa9-881d-c8ff899d9ee8","Type":"ContainerStarted","Data":"21f2da7f16aa4e79bcbb44b064f5659dcbaefeef91030afdc2aceff797eb1c91"} Dec 13 03:51:17 crc kubenswrapper[5070]: I1213 03:51:17.159833 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-psxtf" event={"ID":"82cfb89d-1206-4fa9-881d-c8ff899d9ee8","Type":"ContainerStarted","Data":"0c7d95367b2201ed8a82e0a96455f2110a546a75bad2c161e09f65d5c97e6dd1"} Dec 13 03:51:21 crc kubenswrapper[5070]: I1213 03:51:21.942870 5070 patch_prober.go:28] interesting pod/machine-config-daemon-9l4rb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 13 03:51:21 crc kubenswrapper[5070]: I1213 03:51:21.943357 5070 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 13 03:51:26 crc kubenswrapper[5070]: I1213 03:51:26.230821 5070 generic.go:334] "Generic (PLEG): container finished" podID="82cfb89d-1206-4fa9-881d-c8ff899d9ee8" containerID="0c7d95367b2201ed8a82e0a96455f2110a546a75bad2c161e09f65d5c97e6dd1" exitCode=0 Dec 13 03:51:26 crc kubenswrapper[5070]: I1213 03:51:26.231281 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-psxtf" event={"ID":"82cfb89d-1206-4fa9-881d-c8ff899d9ee8","Type":"ContainerDied","Data":"0c7d95367b2201ed8a82e0a96455f2110a546a75bad2c161e09f65d5c97e6dd1"} Dec 13 03:51:27 crc kubenswrapper[5070]: I1213 03:51:27.627727 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-psxtf" Dec 13 03:51:27 crc kubenswrapper[5070]: I1213 03:51:27.729956 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/82cfb89d-1206-4fa9-881d-c8ff899d9ee8-ceph\") pod \"82cfb89d-1206-4fa9-881d-c8ff899d9ee8\" (UID: \"82cfb89d-1206-4fa9-881d-c8ff899d9ee8\") " Dec 13 03:51:27 crc kubenswrapper[5070]: I1213 03:51:27.730005 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/82cfb89d-1206-4fa9-881d-c8ff899d9ee8-ssh-key\") pod \"82cfb89d-1206-4fa9-881d-c8ff899d9ee8\" (UID: \"82cfb89d-1206-4fa9-881d-c8ff899d9ee8\") " Dec 13 03:51:27 crc kubenswrapper[5070]: I1213 03:51:27.730079 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qltf8\" (UniqueName: \"kubernetes.io/projected/82cfb89d-1206-4fa9-881d-c8ff899d9ee8-kube-api-access-qltf8\") pod \"82cfb89d-1206-4fa9-881d-c8ff899d9ee8\" (UID: \"82cfb89d-1206-4fa9-881d-c8ff899d9ee8\") " Dec 13 03:51:27 crc kubenswrapper[5070]: I1213 03:51:27.730127 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/82cfb89d-1206-4fa9-881d-c8ff899d9ee8-inventory\") pod \"82cfb89d-1206-4fa9-881d-c8ff899d9ee8\" (UID: \"82cfb89d-1206-4fa9-881d-c8ff899d9ee8\") " Dec 13 03:51:27 crc kubenswrapper[5070]: I1213 03:51:27.751639 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/82cfb89d-1206-4fa9-881d-c8ff899d9ee8-ceph" (OuterVolumeSpecName: "ceph") pod "82cfb89d-1206-4fa9-881d-c8ff899d9ee8" (UID: "82cfb89d-1206-4fa9-881d-c8ff899d9ee8"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:51:27 crc kubenswrapper[5070]: I1213 03:51:27.756679 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/82cfb89d-1206-4fa9-881d-c8ff899d9ee8-kube-api-access-qltf8" (OuterVolumeSpecName: "kube-api-access-qltf8") pod "82cfb89d-1206-4fa9-881d-c8ff899d9ee8" (UID: "82cfb89d-1206-4fa9-881d-c8ff899d9ee8"). InnerVolumeSpecName "kube-api-access-qltf8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:51:27 crc kubenswrapper[5070]: I1213 03:51:27.785262 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/82cfb89d-1206-4fa9-881d-c8ff899d9ee8-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "82cfb89d-1206-4fa9-881d-c8ff899d9ee8" (UID: "82cfb89d-1206-4fa9-881d-c8ff899d9ee8"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:51:27 crc kubenswrapper[5070]: I1213 03:51:27.810682 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/82cfb89d-1206-4fa9-881d-c8ff899d9ee8-inventory" (OuterVolumeSpecName: "inventory") pod "82cfb89d-1206-4fa9-881d-c8ff899d9ee8" (UID: "82cfb89d-1206-4fa9-881d-c8ff899d9ee8"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:51:27 crc kubenswrapper[5070]: I1213 03:51:27.832150 5070 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/82cfb89d-1206-4fa9-881d-c8ff899d9ee8-ceph\") on node \"crc\" DevicePath \"\"" Dec 13 03:51:27 crc kubenswrapper[5070]: I1213 03:51:27.832202 5070 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/82cfb89d-1206-4fa9-881d-c8ff899d9ee8-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 13 03:51:27 crc kubenswrapper[5070]: I1213 03:51:27.832216 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qltf8\" (UniqueName: \"kubernetes.io/projected/82cfb89d-1206-4fa9-881d-c8ff899d9ee8-kube-api-access-qltf8\") on node \"crc\" DevicePath \"\"" Dec 13 03:51:27 crc kubenswrapper[5070]: I1213 03:51:27.832230 5070 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/82cfb89d-1206-4fa9-881d-c8ff899d9ee8-inventory\") on node \"crc\" DevicePath \"\"" Dec 13 03:51:28 crc kubenswrapper[5070]: I1213 03:51:28.251354 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-psxtf" event={"ID":"82cfb89d-1206-4fa9-881d-c8ff899d9ee8","Type":"ContainerDied","Data":"21f2da7f16aa4e79bcbb44b064f5659dcbaefeef91030afdc2aceff797eb1c91"} Dec 13 03:51:28 crc kubenswrapper[5070]: I1213 03:51:28.251761 5070 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="21f2da7f16aa4e79bcbb44b064f5659dcbaefeef91030afdc2aceff797eb1c91" Dec 13 03:51:28 crc kubenswrapper[5070]: I1213 03:51:28.251484 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-psxtf" Dec 13 03:51:28 crc kubenswrapper[5070]: I1213 03:51:28.351040 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-vkbfm"] Dec 13 03:51:28 crc kubenswrapper[5070]: E1213 03:51:28.351638 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="82cfb89d-1206-4fa9-881d-c8ff899d9ee8" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Dec 13 03:51:28 crc kubenswrapper[5070]: I1213 03:51:28.351673 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="82cfb89d-1206-4fa9-881d-c8ff899d9ee8" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Dec 13 03:51:28 crc kubenswrapper[5070]: I1213 03:51:28.351940 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="82cfb89d-1206-4fa9-881d-c8ff899d9ee8" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Dec 13 03:51:28 crc kubenswrapper[5070]: I1213 03:51:28.352756 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-vkbfm" Dec 13 03:51:28 crc kubenswrapper[5070]: I1213 03:51:28.356551 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-neutron-metadata-default-certs-0" Dec 13 03:51:28 crc kubenswrapper[5070]: I1213 03:51:28.356901 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 13 03:51:28 crc kubenswrapper[5070]: I1213 03:51:28.357160 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-gdkcc" Dec 13 03:51:28 crc kubenswrapper[5070]: I1213 03:51:28.356948 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Dec 13 03:51:28 crc kubenswrapper[5070]: I1213 03:51:28.358581 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 13 03:51:28 crc kubenswrapper[5070]: I1213 03:51:28.359019 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-ovn-default-certs-0" Dec 13 03:51:28 crc kubenswrapper[5070]: I1213 03:51:28.359513 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-libvirt-default-certs-0" Dec 13 03:51:28 crc kubenswrapper[5070]: I1213 03:51:28.360526 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 13 03:51:28 crc kubenswrapper[5070]: I1213 03:51:28.384219 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-vkbfm"] Dec 13 03:51:28 crc kubenswrapper[5070]: I1213 03:51:28.442475 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55e3e391-74a5-4af9-85de-df93ac3155e0-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-vkbfm\" (UID: \"55e3e391-74a5-4af9-85de-df93ac3155e0\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-vkbfm" Dec 13 03:51:28 crc kubenswrapper[5070]: I1213 03:51:28.442517 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55e3e391-74a5-4af9-85de-df93ac3155e0-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-vkbfm\" (UID: \"55e3e391-74a5-4af9-85de-df93ac3155e0\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-vkbfm" Dec 13 03:51:28 crc kubenswrapper[5070]: I1213 03:51:28.442562 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/55e3e391-74a5-4af9-85de-df93ac3155e0-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-vkbfm\" (UID: \"55e3e391-74a5-4af9-85de-df93ac3155e0\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-vkbfm" Dec 13 03:51:28 crc kubenswrapper[5070]: I1213 03:51:28.442686 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9fqr8\" (UniqueName: \"kubernetes.io/projected/55e3e391-74a5-4af9-85de-df93ac3155e0-kube-api-access-9fqr8\") pod 
\"install-certs-edpm-deployment-openstack-edpm-ipam-vkbfm\" (UID: \"55e3e391-74a5-4af9-85de-df93ac3155e0\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-vkbfm" Dec 13 03:51:28 crc kubenswrapper[5070]: I1213 03:51:28.442722 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/55e3e391-74a5-4af9-85de-df93ac3155e0-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-vkbfm\" (UID: \"55e3e391-74a5-4af9-85de-df93ac3155e0\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-vkbfm" Dec 13 03:51:28 crc kubenswrapper[5070]: I1213 03:51:28.442753 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/55e3e391-74a5-4af9-85de-df93ac3155e0-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-vkbfm\" (UID: \"55e3e391-74a5-4af9-85de-df93ac3155e0\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-vkbfm" Dec 13 03:51:28 crc kubenswrapper[5070]: I1213 03:51:28.442773 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/55e3e391-74a5-4af9-85de-df93ac3155e0-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-vkbfm\" (UID: \"55e3e391-74a5-4af9-85de-df93ac3155e0\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-vkbfm" Dec 13 03:51:28 crc kubenswrapper[5070]: I1213 03:51:28.442793 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55e3e391-74a5-4af9-85de-df93ac3155e0-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-vkbfm\" (UID: \"55e3e391-74a5-4af9-85de-df93ac3155e0\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-vkbfm" Dec 13 03:51:28 crc kubenswrapper[5070]: I1213 03:51:28.442814 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55e3e391-74a5-4af9-85de-df93ac3155e0-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-vkbfm\" (UID: \"55e3e391-74a5-4af9-85de-df93ac3155e0\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-vkbfm" Dec 13 03:51:28 crc kubenswrapper[5070]: I1213 03:51:28.442873 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55e3e391-74a5-4af9-85de-df93ac3155e0-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-vkbfm\" (UID: \"55e3e391-74a5-4af9-85de-df93ac3155e0\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-vkbfm" Dec 13 03:51:28 crc kubenswrapper[5070]: I1213 03:51:28.442929 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/55e3e391-74a5-4af9-85de-df93ac3155e0-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-vkbfm\" (UID: \"55e3e391-74a5-4af9-85de-df93ac3155e0\") " 
pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-vkbfm" Dec 13 03:51:28 crc kubenswrapper[5070]: I1213 03:51:28.442966 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/55e3e391-74a5-4af9-85de-df93ac3155e0-ceph\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-vkbfm\" (UID: \"55e3e391-74a5-4af9-85de-df93ac3155e0\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-vkbfm" Dec 13 03:51:28 crc kubenswrapper[5070]: I1213 03:51:28.442985 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55e3e391-74a5-4af9-85de-df93ac3155e0-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-vkbfm\" (UID: \"55e3e391-74a5-4af9-85de-df93ac3155e0\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-vkbfm" Dec 13 03:51:28 crc kubenswrapper[5070]: I1213 03:51:28.544842 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/55e3e391-74a5-4af9-85de-df93ac3155e0-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-vkbfm\" (UID: \"55e3e391-74a5-4af9-85de-df93ac3155e0\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-vkbfm" Dec 13 03:51:28 crc kubenswrapper[5070]: I1213 03:51:28.544889 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/55e3e391-74a5-4af9-85de-df93ac3155e0-ceph\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-vkbfm\" (UID: \"55e3e391-74a5-4af9-85de-df93ac3155e0\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-vkbfm" Dec 13 03:51:28 crc kubenswrapper[5070]: I1213 03:51:28.544919 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55e3e391-74a5-4af9-85de-df93ac3155e0-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-vkbfm\" (UID: \"55e3e391-74a5-4af9-85de-df93ac3155e0\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-vkbfm" Dec 13 03:51:28 crc kubenswrapper[5070]: I1213 03:51:28.544948 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55e3e391-74a5-4af9-85de-df93ac3155e0-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-vkbfm\" (UID: \"55e3e391-74a5-4af9-85de-df93ac3155e0\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-vkbfm" Dec 13 03:51:28 crc kubenswrapper[5070]: I1213 03:51:28.544975 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55e3e391-74a5-4af9-85de-df93ac3155e0-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-vkbfm\" (UID: \"55e3e391-74a5-4af9-85de-df93ac3155e0\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-vkbfm" Dec 13 03:51:28 crc kubenswrapper[5070]: I1213 03:51:28.545026 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/55e3e391-74a5-4af9-85de-df93ac3155e0-openstack-edpm-ipam-libvirt-default-certs-0\") 
pod \"install-certs-edpm-deployment-openstack-edpm-ipam-vkbfm\" (UID: \"55e3e391-74a5-4af9-85de-df93ac3155e0\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-vkbfm" Dec 13 03:51:28 crc kubenswrapper[5070]: I1213 03:51:28.545077 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9fqr8\" (UniqueName: \"kubernetes.io/projected/55e3e391-74a5-4af9-85de-df93ac3155e0-kube-api-access-9fqr8\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-vkbfm\" (UID: \"55e3e391-74a5-4af9-85de-df93ac3155e0\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-vkbfm" Dec 13 03:51:28 crc kubenswrapper[5070]: I1213 03:51:28.545120 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/55e3e391-74a5-4af9-85de-df93ac3155e0-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-vkbfm\" (UID: \"55e3e391-74a5-4af9-85de-df93ac3155e0\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-vkbfm" Dec 13 03:51:28 crc kubenswrapper[5070]: I1213 03:51:28.545171 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/55e3e391-74a5-4af9-85de-df93ac3155e0-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-vkbfm\" (UID: \"55e3e391-74a5-4af9-85de-df93ac3155e0\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-vkbfm" Dec 13 03:51:28 crc kubenswrapper[5070]: I1213 03:51:28.545207 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/55e3e391-74a5-4af9-85de-df93ac3155e0-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-vkbfm\" (UID: \"55e3e391-74a5-4af9-85de-df93ac3155e0\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-vkbfm" Dec 13 03:51:28 crc kubenswrapper[5070]: I1213 03:51:28.545245 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55e3e391-74a5-4af9-85de-df93ac3155e0-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-vkbfm\" (UID: \"55e3e391-74a5-4af9-85de-df93ac3155e0\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-vkbfm" Dec 13 03:51:28 crc kubenswrapper[5070]: I1213 03:51:28.545283 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55e3e391-74a5-4af9-85de-df93ac3155e0-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-vkbfm\" (UID: \"55e3e391-74a5-4af9-85de-df93ac3155e0\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-vkbfm" Dec 13 03:51:28 crc kubenswrapper[5070]: I1213 03:51:28.545387 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55e3e391-74a5-4af9-85de-df93ac3155e0-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-vkbfm\" (UID: \"55e3e391-74a5-4af9-85de-df93ac3155e0\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-vkbfm" Dec 13 03:51:28 crc kubenswrapper[5070]: I1213 03:51:28.549249 5070 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/55e3e391-74a5-4af9-85de-df93ac3155e0-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-vkbfm\" (UID: \"55e3e391-74a5-4af9-85de-df93ac3155e0\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-vkbfm" Dec 13 03:51:28 crc kubenswrapper[5070]: I1213 03:51:28.549501 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/55e3e391-74a5-4af9-85de-df93ac3155e0-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-vkbfm\" (UID: \"55e3e391-74a5-4af9-85de-df93ac3155e0\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-vkbfm" Dec 13 03:51:28 crc kubenswrapper[5070]: I1213 03:51:28.550239 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/55e3e391-74a5-4af9-85de-df93ac3155e0-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-vkbfm\" (UID: \"55e3e391-74a5-4af9-85de-df93ac3155e0\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-vkbfm" Dec 13 03:51:28 crc kubenswrapper[5070]: I1213 03:51:28.550700 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55e3e391-74a5-4af9-85de-df93ac3155e0-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-vkbfm\" (UID: \"55e3e391-74a5-4af9-85de-df93ac3155e0\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-vkbfm" Dec 13 03:51:28 crc kubenswrapper[5070]: I1213 03:51:28.550921 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55e3e391-74a5-4af9-85de-df93ac3155e0-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-vkbfm\" (UID: \"55e3e391-74a5-4af9-85de-df93ac3155e0\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-vkbfm" Dec 13 03:51:28 crc kubenswrapper[5070]: I1213 03:51:28.551218 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/55e3e391-74a5-4af9-85de-df93ac3155e0-ceph\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-vkbfm\" (UID: \"55e3e391-74a5-4af9-85de-df93ac3155e0\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-vkbfm" Dec 13 03:51:28 crc kubenswrapper[5070]: I1213 03:51:28.552135 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/55e3e391-74a5-4af9-85de-df93ac3155e0-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-vkbfm\" (UID: \"55e3e391-74a5-4af9-85de-df93ac3155e0\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-vkbfm" Dec 13 03:51:28 crc kubenswrapper[5070]: I1213 03:51:28.552946 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55e3e391-74a5-4af9-85de-df93ac3155e0-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-vkbfm\" (UID: \"55e3e391-74a5-4af9-85de-df93ac3155e0\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-vkbfm" Dec 13 
03:51:28 crc kubenswrapper[5070]: I1213 03:51:28.553043 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/55e3e391-74a5-4af9-85de-df93ac3155e0-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-vkbfm\" (UID: \"55e3e391-74a5-4af9-85de-df93ac3155e0\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-vkbfm" Dec 13 03:51:28 crc kubenswrapper[5070]: I1213 03:51:28.554255 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55e3e391-74a5-4af9-85de-df93ac3155e0-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-vkbfm\" (UID: \"55e3e391-74a5-4af9-85de-df93ac3155e0\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-vkbfm" Dec 13 03:51:28 crc kubenswrapper[5070]: I1213 03:51:28.557126 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55e3e391-74a5-4af9-85de-df93ac3155e0-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-vkbfm\" (UID: \"55e3e391-74a5-4af9-85de-df93ac3155e0\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-vkbfm" Dec 13 03:51:28 crc kubenswrapper[5070]: I1213 03:51:28.560609 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55e3e391-74a5-4af9-85de-df93ac3155e0-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-vkbfm\" (UID: \"55e3e391-74a5-4af9-85de-df93ac3155e0\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-vkbfm" Dec 13 03:51:28 crc kubenswrapper[5070]: I1213 03:51:28.581376 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9fqr8\" (UniqueName: \"kubernetes.io/projected/55e3e391-74a5-4af9-85de-df93ac3155e0-kube-api-access-9fqr8\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-vkbfm\" (UID: \"55e3e391-74a5-4af9-85de-df93ac3155e0\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-vkbfm" Dec 13 03:51:28 crc kubenswrapper[5070]: I1213 03:51:28.671874 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-vkbfm" Dec 13 03:51:29 crc kubenswrapper[5070]: I1213 03:51:29.181755 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-vkbfm"] Dec 13 03:51:29 crc kubenswrapper[5070]: I1213 03:51:29.262822 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-vkbfm" event={"ID":"55e3e391-74a5-4af9-85de-df93ac3155e0","Type":"ContainerStarted","Data":"2fdb71ff9e288477fbeee5aadda9c58958ca1a67f6b7460d3f3c60a55d237988"} Dec 13 03:51:30 crc kubenswrapper[5070]: I1213 03:51:30.276139 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-vkbfm" event={"ID":"55e3e391-74a5-4af9-85de-df93ac3155e0","Type":"ContainerStarted","Data":"a829369b72122dac0f2a65fcd4b722d3b9853f777c5797e27480f7204e1c6d71"} Dec 13 03:51:30 crc kubenswrapper[5070]: I1213 03:51:30.310228 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-vkbfm" podStartSLOduration=1.851330093 podStartE2EDuration="2.310204138s" podCreationTimestamp="2025-12-13 03:51:28 +0000 UTC" firstStartedPulling="2025-12-13 03:51:29.186217186 +0000 UTC m=+2381.422060732" lastFinishedPulling="2025-12-13 03:51:29.645091211 +0000 UTC m=+2381.880934777" observedRunningTime="2025-12-13 03:51:30.305223622 +0000 UTC m=+2382.541067208" watchObservedRunningTime="2025-12-13 03:51:30.310204138 +0000 UTC m=+2382.546047704" Dec 13 03:51:51 crc kubenswrapper[5070]: I1213 03:51:51.942959 5070 patch_prober.go:28] interesting pod/machine-config-daemon-9l4rb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 13 03:51:51 crc kubenswrapper[5070]: I1213 03:51:51.943597 5070 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 13 03:52:01 crc kubenswrapper[5070]: E1213 03:52:01.357538 5070 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod55e3e391_74a5_4af9_85de_df93ac3155e0.slice/crio-conmon-a829369b72122dac0f2a65fcd4b722d3b9853f777c5797e27480f7204e1c6d71.scope\": RecentStats: unable to find data in memory cache]" Dec 13 03:52:01 crc kubenswrapper[5070]: I1213 03:52:01.534792 5070 generic.go:334] "Generic (PLEG): container finished" podID="55e3e391-74a5-4af9-85de-df93ac3155e0" containerID="a829369b72122dac0f2a65fcd4b722d3b9853f777c5797e27480f7204e1c6d71" exitCode=0 Dec 13 03:52:01 crc kubenswrapper[5070]: I1213 03:52:01.534858 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-vkbfm" event={"ID":"55e3e391-74a5-4af9-85de-df93ac3155e0","Type":"ContainerDied","Data":"a829369b72122dac0f2a65fcd4b722d3b9853f777c5797e27480f7204e1c6d71"} Dec 13 03:52:02 crc kubenswrapper[5070]: I1213 03:52:02.942081 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-vkbfm" Dec 13 03:52:03 crc kubenswrapper[5070]: I1213 03:52:03.067862 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55e3e391-74a5-4af9-85de-df93ac3155e0-libvirt-combined-ca-bundle\") pod \"55e3e391-74a5-4af9-85de-df93ac3155e0\" (UID: \"55e3e391-74a5-4af9-85de-df93ac3155e0\") " Dec 13 03:52:03 crc kubenswrapper[5070]: I1213 03:52:03.067903 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55e3e391-74a5-4af9-85de-df93ac3155e0-ovn-combined-ca-bundle\") pod \"55e3e391-74a5-4af9-85de-df93ac3155e0\" (UID: \"55e3e391-74a5-4af9-85de-df93ac3155e0\") " Dec 13 03:52:03 crc kubenswrapper[5070]: I1213 03:52:03.067956 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/55e3e391-74a5-4af9-85de-df93ac3155e0-ssh-key\") pod \"55e3e391-74a5-4af9-85de-df93ac3155e0\" (UID: \"55e3e391-74a5-4af9-85de-df93ac3155e0\") " Dec 13 03:52:03 crc kubenswrapper[5070]: I1213 03:52:03.067975 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55e3e391-74a5-4af9-85de-df93ac3155e0-neutron-metadata-combined-ca-bundle\") pod \"55e3e391-74a5-4af9-85de-df93ac3155e0\" (UID: \"55e3e391-74a5-4af9-85de-df93ac3155e0\") " Dec 13 03:52:03 crc kubenswrapper[5070]: I1213 03:52:03.068013 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/55e3e391-74a5-4af9-85de-df93ac3155e0-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"55e3e391-74a5-4af9-85de-df93ac3155e0\" (UID: \"55e3e391-74a5-4af9-85de-df93ac3155e0\") " Dec 13 03:52:03 crc kubenswrapper[5070]: I1213 03:52:03.068034 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55e3e391-74a5-4af9-85de-df93ac3155e0-repo-setup-combined-ca-bundle\") pod \"55e3e391-74a5-4af9-85de-df93ac3155e0\" (UID: \"55e3e391-74a5-4af9-85de-df93ac3155e0\") " Dec 13 03:52:03 crc kubenswrapper[5070]: I1213 03:52:03.068061 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9fqr8\" (UniqueName: \"kubernetes.io/projected/55e3e391-74a5-4af9-85de-df93ac3155e0-kube-api-access-9fqr8\") pod \"55e3e391-74a5-4af9-85de-df93ac3155e0\" (UID: \"55e3e391-74a5-4af9-85de-df93ac3155e0\") " Dec 13 03:52:03 crc kubenswrapper[5070]: I1213 03:52:03.068084 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/55e3e391-74a5-4af9-85de-df93ac3155e0-inventory\") pod \"55e3e391-74a5-4af9-85de-df93ac3155e0\" (UID: \"55e3e391-74a5-4af9-85de-df93ac3155e0\") " Dec 13 03:52:03 crc kubenswrapper[5070]: I1213 03:52:03.068107 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/55e3e391-74a5-4af9-85de-df93ac3155e0-openstack-edpm-ipam-ovn-default-certs-0\") pod \"55e3e391-74a5-4af9-85de-df93ac3155e0\" (UID: \"55e3e391-74a5-4af9-85de-df93ac3155e0\") " Dec 13 03:52:03 crc kubenswrapper[5070]: I1213 
03:52:03.068604 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55e3e391-74a5-4af9-85de-df93ac3155e0-nova-combined-ca-bundle\") pod \"55e3e391-74a5-4af9-85de-df93ac3155e0\" (UID: \"55e3e391-74a5-4af9-85de-df93ac3155e0\") " Dec 13 03:52:03 crc kubenswrapper[5070]: I1213 03:52:03.068634 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/55e3e391-74a5-4af9-85de-df93ac3155e0-ceph\") pod \"55e3e391-74a5-4af9-85de-df93ac3155e0\" (UID: \"55e3e391-74a5-4af9-85de-df93ac3155e0\") " Dec 13 03:52:03 crc kubenswrapper[5070]: I1213 03:52:03.068721 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55e3e391-74a5-4af9-85de-df93ac3155e0-bootstrap-combined-ca-bundle\") pod \"55e3e391-74a5-4af9-85de-df93ac3155e0\" (UID: \"55e3e391-74a5-4af9-85de-df93ac3155e0\") " Dec 13 03:52:03 crc kubenswrapper[5070]: I1213 03:52:03.068783 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/55e3e391-74a5-4af9-85de-df93ac3155e0-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"55e3e391-74a5-4af9-85de-df93ac3155e0\" (UID: \"55e3e391-74a5-4af9-85de-df93ac3155e0\") " Dec 13 03:52:03 crc kubenswrapper[5070]: I1213 03:52:03.075094 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/55e3e391-74a5-4af9-85de-df93ac3155e0-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "55e3e391-74a5-4af9-85de-df93ac3155e0" (UID: "55e3e391-74a5-4af9-85de-df93ac3155e0"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:52:03 crc kubenswrapper[5070]: I1213 03:52:03.076301 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/55e3e391-74a5-4af9-85de-df93ac3155e0-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "55e3e391-74a5-4af9-85de-df93ac3155e0" (UID: "55e3e391-74a5-4af9-85de-df93ac3155e0"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:52:03 crc kubenswrapper[5070]: I1213 03:52:03.076642 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/55e3e391-74a5-4af9-85de-df93ac3155e0-openstack-edpm-ipam-libvirt-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-libvirt-default-certs-0") pod "55e3e391-74a5-4af9-85de-df93ac3155e0" (UID: "55e3e391-74a5-4af9-85de-df93ac3155e0"). InnerVolumeSpecName "openstack-edpm-ipam-libvirt-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:52:03 crc kubenswrapper[5070]: I1213 03:52:03.076887 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/55e3e391-74a5-4af9-85de-df93ac3155e0-openstack-edpm-ipam-ovn-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-ovn-default-certs-0") pod "55e3e391-74a5-4af9-85de-df93ac3155e0" (UID: "55e3e391-74a5-4af9-85de-df93ac3155e0"). InnerVolumeSpecName "openstack-edpm-ipam-ovn-default-certs-0". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:52:03 crc kubenswrapper[5070]: I1213 03:52:03.077054 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/55e3e391-74a5-4af9-85de-df93ac3155e0-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "55e3e391-74a5-4af9-85de-df93ac3155e0" (UID: "55e3e391-74a5-4af9-85de-df93ac3155e0"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:52:03 crc kubenswrapper[5070]: I1213 03:52:03.077186 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/55e3e391-74a5-4af9-85de-df93ac3155e0-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "55e3e391-74a5-4af9-85de-df93ac3155e0" (UID: "55e3e391-74a5-4af9-85de-df93ac3155e0"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:52:03 crc kubenswrapper[5070]: I1213 03:52:03.077406 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/55e3e391-74a5-4af9-85de-df93ac3155e0-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "55e3e391-74a5-4af9-85de-df93ac3155e0" (UID: "55e3e391-74a5-4af9-85de-df93ac3155e0"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:52:03 crc kubenswrapper[5070]: I1213 03:52:03.078945 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/55e3e391-74a5-4af9-85de-df93ac3155e0-kube-api-access-9fqr8" (OuterVolumeSpecName: "kube-api-access-9fqr8") pod "55e3e391-74a5-4af9-85de-df93ac3155e0" (UID: "55e3e391-74a5-4af9-85de-df93ac3155e0"). InnerVolumeSpecName "kube-api-access-9fqr8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:52:03 crc kubenswrapper[5070]: I1213 03:52:03.079408 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/55e3e391-74a5-4af9-85de-df93ac3155e0-ceph" (OuterVolumeSpecName: "ceph") pod "55e3e391-74a5-4af9-85de-df93ac3155e0" (UID: "55e3e391-74a5-4af9-85de-df93ac3155e0"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:52:03 crc kubenswrapper[5070]: I1213 03:52:03.079869 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/55e3e391-74a5-4af9-85de-df93ac3155e0-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "55e3e391-74a5-4af9-85de-df93ac3155e0" (UID: "55e3e391-74a5-4af9-85de-df93ac3155e0"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:52:03 crc kubenswrapper[5070]: I1213 03:52:03.080557 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/55e3e391-74a5-4af9-85de-df93ac3155e0-openstack-edpm-ipam-neutron-metadata-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-neutron-metadata-default-certs-0") pod "55e3e391-74a5-4af9-85de-df93ac3155e0" (UID: "55e3e391-74a5-4af9-85de-df93ac3155e0"). InnerVolumeSpecName "openstack-edpm-ipam-neutron-metadata-default-certs-0". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:52:03 crc kubenswrapper[5070]: I1213 03:52:03.101306 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/55e3e391-74a5-4af9-85de-df93ac3155e0-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "55e3e391-74a5-4af9-85de-df93ac3155e0" (UID: "55e3e391-74a5-4af9-85de-df93ac3155e0"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:52:03 crc kubenswrapper[5070]: I1213 03:52:03.101585 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/55e3e391-74a5-4af9-85de-df93ac3155e0-inventory" (OuterVolumeSpecName: "inventory") pod "55e3e391-74a5-4af9-85de-df93ac3155e0" (UID: "55e3e391-74a5-4af9-85de-df93ac3155e0"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:52:03 crc kubenswrapper[5070]: I1213 03:52:03.172003 5070 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/55e3e391-74a5-4af9-85de-df93ac3155e0-inventory\") on node \"crc\" DevicePath \"\"" Dec 13 03:52:03 crc kubenswrapper[5070]: I1213 03:52:03.172115 5070 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/55e3e391-74a5-4af9-85de-df93ac3155e0-openstack-edpm-ipam-ovn-default-certs-0\") on node \"crc\" DevicePath \"\"" Dec 13 03:52:03 crc kubenswrapper[5070]: I1213 03:52:03.172169 5070 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55e3e391-74a5-4af9-85de-df93ac3155e0-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 03:52:03 crc kubenswrapper[5070]: I1213 03:52:03.172262 5070 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/55e3e391-74a5-4af9-85de-df93ac3155e0-ceph\") on node \"crc\" DevicePath \"\"" Dec 13 03:52:03 crc kubenswrapper[5070]: I1213 03:52:03.172303 5070 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55e3e391-74a5-4af9-85de-df93ac3155e0-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 03:52:03 crc kubenswrapper[5070]: I1213 03:52:03.172371 5070 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/55e3e391-74a5-4af9-85de-df93ac3155e0-openstack-edpm-ipam-libvirt-default-certs-0\") on node \"crc\" DevicePath \"\"" Dec 13 03:52:03 crc kubenswrapper[5070]: I1213 03:52:03.172406 5070 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55e3e391-74a5-4af9-85de-df93ac3155e0-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 03:52:03 crc kubenswrapper[5070]: I1213 03:52:03.172471 5070 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55e3e391-74a5-4af9-85de-df93ac3155e0-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 03:52:03 crc kubenswrapper[5070]: I1213 03:52:03.172501 5070 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/55e3e391-74a5-4af9-85de-df93ac3155e0-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 13 03:52:03 crc kubenswrapper[5070]: I1213 03:52:03.172528 5070 reconciler_common.go:293] "Volume detached for volume 
\"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55e3e391-74a5-4af9-85de-df93ac3155e0-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 03:52:03 crc kubenswrapper[5070]: I1213 03:52:03.172563 5070 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/55e3e391-74a5-4af9-85de-df93ac3155e0-openstack-edpm-ipam-neutron-metadata-default-certs-0\") on node \"crc\" DevicePath \"\"" Dec 13 03:52:03 crc kubenswrapper[5070]: I1213 03:52:03.172591 5070 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55e3e391-74a5-4af9-85de-df93ac3155e0-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 03:52:03 crc kubenswrapper[5070]: I1213 03:52:03.172619 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9fqr8\" (UniqueName: \"kubernetes.io/projected/55e3e391-74a5-4af9-85de-df93ac3155e0-kube-api-access-9fqr8\") on node \"crc\" DevicePath \"\"" Dec 13 03:52:03 crc kubenswrapper[5070]: I1213 03:52:03.568403 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-vkbfm" event={"ID":"55e3e391-74a5-4af9-85de-df93ac3155e0","Type":"ContainerDied","Data":"2fdb71ff9e288477fbeee5aadda9c58958ca1a67f6b7460d3f3c60a55d237988"} Dec 13 03:52:03 crc kubenswrapper[5070]: I1213 03:52:03.568467 5070 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2fdb71ff9e288477fbeee5aadda9c58958ca1a67f6b7460d3f3c60a55d237988" Dec 13 03:52:03 crc kubenswrapper[5070]: I1213 03:52:03.568558 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-vkbfm" Dec 13 03:52:03 crc kubenswrapper[5070]: I1213 03:52:03.676358 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-jvrl7"] Dec 13 03:52:03 crc kubenswrapper[5070]: E1213 03:52:03.677846 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="55e3e391-74a5-4af9-85de-df93ac3155e0" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Dec 13 03:52:03 crc kubenswrapper[5070]: I1213 03:52:03.677970 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="55e3e391-74a5-4af9-85de-df93ac3155e0" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Dec 13 03:52:03 crc kubenswrapper[5070]: I1213 03:52:03.678256 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="55e3e391-74a5-4af9-85de-df93ac3155e0" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Dec 13 03:52:03 crc kubenswrapper[5070]: I1213 03:52:03.679146 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-jvrl7" Dec 13 03:52:03 crc kubenswrapper[5070]: I1213 03:52:03.689480 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 13 03:52:03 crc kubenswrapper[5070]: I1213 03:52:03.689728 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 13 03:52:03 crc kubenswrapper[5070]: I1213 03:52:03.689824 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-gdkcc" Dec 13 03:52:03 crc kubenswrapper[5070]: I1213 03:52:03.690006 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 13 03:52:03 crc kubenswrapper[5070]: I1213 03:52:03.690294 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Dec 13 03:52:03 crc kubenswrapper[5070]: I1213 03:52:03.691240 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/dcb9f2b5-3392-400a-9071-39b873d26bca-ssh-key\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-jvrl7\" (UID: \"dcb9f2b5-3392-400a-9071-39b873d26bca\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-jvrl7" Dec 13 03:52:03 crc kubenswrapper[5070]: I1213 03:52:03.691495 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/dcb9f2b5-3392-400a-9071-39b873d26bca-ceph\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-jvrl7\" (UID: \"dcb9f2b5-3392-400a-9071-39b873d26bca\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-jvrl7" Dec 13 03:52:03 crc kubenswrapper[5070]: I1213 03:52:03.691565 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7rg8l\" (UniqueName: \"kubernetes.io/projected/dcb9f2b5-3392-400a-9071-39b873d26bca-kube-api-access-7rg8l\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-jvrl7\" (UID: \"dcb9f2b5-3392-400a-9071-39b873d26bca\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-jvrl7" Dec 13 03:52:03 crc kubenswrapper[5070]: I1213 03:52:03.691741 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/dcb9f2b5-3392-400a-9071-39b873d26bca-inventory\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-jvrl7\" (UID: \"dcb9f2b5-3392-400a-9071-39b873d26bca\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-jvrl7" Dec 13 03:52:03 crc kubenswrapper[5070]: I1213 03:52:03.708924 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-jvrl7"] Dec 13 03:52:03 crc kubenswrapper[5070]: I1213 03:52:03.793837 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/dcb9f2b5-3392-400a-9071-39b873d26bca-ceph\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-jvrl7\" (UID: \"dcb9f2b5-3392-400a-9071-39b873d26bca\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-jvrl7" Dec 13 03:52:03 crc kubenswrapper[5070]: I1213 03:52:03.793899 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7rg8l\" (UniqueName: 
\"kubernetes.io/projected/dcb9f2b5-3392-400a-9071-39b873d26bca-kube-api-access-7rg8l\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-jvrl7\" (UID: \"dcb9f2b5-3392-400a-9071-39b873d26bca\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-jvrl7" Dec 13 03:52:03 crc kubenswrapper[5070]: I1213 03:52:03.794314 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/dcb9f2b5-3392-400a-9071-39b873d26bca-inventory\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-jvrl7\" (UID: \"dcb9f2b5-3392-400a-9071-39b873d26bca\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-jvrl7" Dec 13 03:52:03 crc kubenswrapper[5070]: I1213 03:52:03.794385 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/dcb9f2b5-3392-400a-9071-39b873d26bca-ssh-key\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-jvrl7\" (UID: \"dcb9f2b5-3392-400a-9071-39b873d26bca\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-jvrl7" Dec 13 03:52:03 crc kubenswrapper[5070]: I1213 03:52:03.799264 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/dcb9f2b5-3392-400a-9071-39b873d26bca-ssh-key\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-jvrl7\" (UID: \"dcb9f2b5-3392-400a-9071-39b873d26bca\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-jvrl7" Dec 13 03:52:03 crc kubenswrapper[5070]: I1213 03:52:03.803239 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/dcb9f2b5-3392-400a-9071-39b873d26bca-ceph\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-jvrl7\" (UID: \"dcb9f2b5-3392-400a-9071-39b873d26bca\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-jvrl7" Dec 13 03:52:03 crc kubenswrapper[5070]: I1213 03:52:03.805661 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/dcb9f2b5-3392-400a-9071-39b873d26bca-inventory\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-jvrl7\" (UID: \"dcb9f2b5-3392-400a-9071-39b873d26bca\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-jvrl7" Dec 13 03:52:03 crc kubenswrapper[5070]: I1213 03:52:03.813093 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7rg8l\" (UniqueName: \"kubernetes.io/projected/dcb9f2b5-3392-400a-9071-39b873d26bca-kube-api-access-7rg8l\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-jvrl7\" (UID: \"dcb9f2b5-3392-400a-9071-39b873d26bca\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-jvrl7" Dec 13 03:52:04 crc kubenswrapper[5070]: I1213 03:52:04.010248 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-jvrl7" Dec 13 03:52:04 crc kubenswrapper[5070]: I1213 03:52:04.548194 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-jvrl7"] Dec 13 03:52:04 crc kubenswrapper[5070]: I1213 03:52:04.576558 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-jvrl7" event={"ID":"dcb9f2b5-3392-400a-9071-39b873d26bca","Type":"ContainerStarted","Data":"8220e4f0a5698b220d499c0caf7f7482e1022066423cbe7036f63cb0ab8a9372"} Dec 13 03:52:05 crc kubenswrapper[5070]: I1213 03:52:05.586913 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-jvrl7" event={"ID":"dcb9f2b5-3392-400a-9071-39b873d26bca","Type":"ContainerStarted","Data":"7f01a6ebcbe5dda940c419de69c8f25efdbb7392ccc8676022cfe58d435af12e"} Dec 13 03:52:05 crc kubenswrapper[5070]: I1213 03:52:05.620921 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-jvrl7" podStartSLOduration=2.137078898 podStartE2EDuration="2.620901086s" podCreationTimestamp="2025-12-13 03:52:03 +0000 UTC" firstStartedPulling="2025-12-13 03:52:04.562374714 +0000 UTC m=+2416.798218260" lastFinishedPulling="2025-12-13 03:52:05.046196902 +0000 UTC m=+2417.282040448" observedRunningTime="2025-12-13 03:52:05.613925244 +0000 UTC m=+2417.849768800" watchObservedRunningTime="2025-12-13 03:52:05.620901086 +0000 UTC m=+2417.856744642" Dec 13 03:52:10 crc kubenswrapper[5070]: I1213 03:52:10.635627 5070 generic.go:334] "Generic (PLEG): container finished" podID="dcb9f2b5-3392-400a-9071-39b873d26bca" containerID="7f01a6ebcbe5dda940c419de69c8f25efdbb7392ccc8676022cfe58d435af12e" exitCode=0 Dec 13 03:52:10 crc kubenswrapper[5070]: I1213 03:52:10.635790 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-jvrl7" event={"ID":"dcb9f2b5-3392-400a-9071-39b873d26bca","Type":"ContainerDied","Data":"7f01a6ebcbe5dda940c419de69c8f25efdbb7392ccc8676022cfe58d435af12e"} Dec 13 03:52:12 crc kubenswrapper[5070]: I1213 03:52:12.063956 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-jvrl7" Dec 13 03:52:12 crc kubenswrapper[5070]: I1213 03:52:12.259558 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/dcb9f2b5-3392-400a-9071-39b873d26bca-ssh-key\") pod \"dcb9f2b5-3392-400a-9071-39b873d26bca\" (UID: \"dcb9f2b5-3392-400a-9071-39b873d26bca\") " Dec 13 03:52:12 crc kubenswrapper[5070]: I1213 03:52:12.259624 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7rg8l\" (UniqueName: \"kubernetes.io/projected/dcb9f2b5-3392-400a-9071-39b873d26bca-kube-api-access-7rg8l\") pod \"dcb9f2b5-3392-400a-9071-39b873d26bca\" (UID: \"dcb9f2b5-3392-400a-9071-39b873d26bca\") " Dec 13 03:52:12 crc kubenswrapper[5070]: I1213 03:52:12.259714 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/dcb9f2b5-3392-400a-9071-39b873d26bca-ceph\") pod \"dcb9f2b5-3392-400a-9071-39b873d26bca\" (UID: \"dcb9f2b5-3392-400a-9071-39b873d26bca\") " Dec 13 03:52:12 crc kubenswrapper[5070]: I1213 03:52:12.260543 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/dcb9f2b5-3392-400a-9071-39b873d26bca-inventory\") pod \"dcb9f2b5-3392-400a-9071-39b873d26bca\" (UID: \"dcb9f2b5-3392-400a-9071-39b873d26bca\") " Dec 13 03:52:12 crc kubenswrapper[5070]: I1213 03:52:12.266554 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dcb9f2b5-3392-400a-9071-39b873d26bca-ceph" (OuterVolumeSpecName: "ceph") pod "dcb9f2b5-3392-400a-9071-39b873d26bca" (UID: "dcb9f2b5-3392-400a-9071-39b873d26bca"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:52:12 crc kubenswrapper[5070]: I1213 03:52:12.271684 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dcb9f2b5-3392-400a-9071-39b873d26bca-kube-api-access-7rg8l" (OuterVolumeSpecName: "kube-api-access-7rg8l") pod "dcb9f2b5-3392-400a-9071-39b873d26bca" (UID: "dcb9f2b5-3392-400a-9071-39b873d26bca"). InnerVolumeSpecName "kube-api-access-7rg8l". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:52:12 crc kubenswrapper[5070]: I1213 03:52:12.288799 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dcb9f2b5-3392-400a-9071-39b873d26bca-inventory" (OuterVolumeSpecName: "inventory") pod "dcb9f2b5-3392-400a-9071-39b873d26bca" (UID: "dcb9f2b5-3392-400a-9071-39b873d26bca"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:52:12 crc kubenswrapper[5070]: I1213 03:52:12.294145 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dcb9f2b5-3392-400a-9071-39b873d26bca-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "dcb9f2b5-3392-400a-9071-39b873d26bca" (UID: "dcb9f2b5-3392-400a-9071-39b873d26bca"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:52:12 crc kubenswrapper[5070]: I1213 03:52:12.363406 5070 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/dcb9f2b5-3392-400a-9071-39b873d26bca-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 13 03:52:12 crc kubenswrapper[5070]: I1213 03:52:12.363482 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7rg8l\" (UniqueName: \"kubernetes.io/projected/dcb9f2b5-3392-400a-9071-39b873d26bca-kube-api-access-7rg8l\") on node \"crc\" DevicePath \"\"" Dec 13 03:52:12 crc kubenswrapper[5070]: I1213 03:52:12.363496 5070 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/dcb9f2b5-3392-400a-9071-39b873d26bca-ceph\") on node \"crc\" DevicePath \"\"" Dec 13 03:52:12 crc kubenswrapper[5070]: I1213 03:52:12.363505 5070 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/dcb9f2b5-3392-400a-9071-39b873d26bca-inventory\") on node \"crc\" DevicePath \"\"" Dec 13 03:52:12 crc kubenswrapper[5070]: I1213 03:52:12.656015 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-jvrl7" event={"ID":"dcb9f2b5-3392-400a-9071-39b873d26bca","Type":"ContainerDied","Data":"8220e4f0a5698b220d499c0caf7f7482e1022066423cbe7036f63cb0ab8a9372"} Dec 13 03:52:12 crc kubenswrapper[5070]: I1213 03:52:12.656074 5070 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8220e4f0a5698b220d499c0caf7f7482e1022066423cbe7036f63cb0ab8a9372" Dec 13 03:52:12 crc kubenswrapper[5070]: I1213 03:52:12.656102 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-jvrl7" Dec 13 03:52:12 crc kubenswrapper[5070]: I1213 03:52:12.735726 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-59xp7"] Dec 13 03:52:12 crc kubenswrapper[5070]: E1213 03:52:12.736082 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dcb9f2b5-3392-400a-9071-39b873d26bca" containerName="ceph-client-edpm-deployment-openstack-edpm-ipam" Dec 13 03:52:12 crc kubenswrapper[5070]: I1213 03:52:12.736100 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="dcb9f2b5-3392-400a-9071-39b873d26bca" containerName="ceph-client-edpm-deployment-openstack-edpm-ipam" Dec 13 03:52:12 crc kubenswrapper[5070]: I1213 03:52:12.736312 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="dcb9f2b5-3392-400a-9071-39b873d26bca" containerName="ceph-client-edpm-deployment-openstack-edpm-ipam" Dec 13 03:52:12 crc kubenswrapper[5070]: I1213 03:52:12.736995 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-59xp7" Dec 13 03:52:12 crc kubenswrapper[5070]: I1213 03:52:12.740554 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 13 03:52:12 crc kubenswrapper[5070]: I1213 03:52:12.740807 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 13 03:52:12 crc kubenswrapper[5070]: I1213 03:52:12.740969 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 13 03:52:12 crc kubenswrapper[5070]: I1213 03:52:12.741050 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-gdkcc" Dec 13 03:52:12 crc kubenswrapper[5070]: I1213 03:52:12.740987 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-config" Dec 13 03:52:12 crc kubenswrapper[5070]: I1213 03:52:12.742361 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Dec 13 03:52:12 crc kubenswrapper[5070]: I1213 03:52:12.746898 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-59xp7"] Dec 13 03:52:12 crc kubenswrapper[5070]: I1213 03:52:12.874844 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ae4ad2bd-aeff-4812-b171-7630319ad71e-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-59xp7\" (UID: \"ae4ad2bd-aeff-4812-b171-7630319ad71e\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-59xp7" Dec 13 03:52:12 crc kubenswrapper[5070]: I1213 03:52:12.874951 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z6456\" (UniqueName: \"kubernetes.io/projected/ae4ad2bd-aeff-4812-b171-7630319ad71e-kube-api-access-z6456\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-59xp7\" (UID: \"ae4ad2bd-aeff-4812-b171-7630319ad71e\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-59xp7" Dec 13 03:52:12 crc kubenswrapper[5070]: I1213 03:52:12.875117 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/ae4ad2bd-aeff-4812-b171-7630319ad71e-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-59xp7\" (UID: \"ae4ad2bd-aeff-4812-b171-7630319ad71e\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-59xp7" Dec 13 03:52:12 crc kubenswrapper[5070]: I1213 03:52:12.875202 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ae4ad2bd-aeff-4812-b171-7630319ad71e-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-59xp7\" (UID: \"ae4ad2bd-aeff-4812-b171-7630319ad71e\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-59xp7" Dec 13 03:52:12 crc kubenswrapper[5070]: I1213 03:52:12.875252 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/ae4ad2bd-aeff-4812-b171-7630319ad71e-ceph\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-59xp7\" (UID: \"ae4ad2bd-aeff-4812-b171-7630319ad71e\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-59xp7" Dec 13 03:52:12 crc kubenswrapper[5070]: I1213 
03:52:12.875287 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae4ad2bd-aeff-4812-b171-7630319ad71e-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-59xp7\" (UID: \"ae4ad2bd-aeff-4812-b171-7630319ad71e\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-59xp7" Dec 13 03:52:12 crc kubenswrapper[5070]: I1213 03:52:12.977696 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ae4ad2bd-aeff-4812-b171-7630319ad71e-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-59xp7\" (UID: \"ae4ad2bd-aeff-4812-b171-7630319ad71e\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-59xp7" Dec 13 03:52:12 crc kubenswrapper[5070]: I1213 03:52:12.977780 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z6456\" (UniqueName: \"kubernetes.io/projected/ae4ad2bd-aeff-4812-b171-7630319ad71e-kube-api-access-z6456\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-59xp7\" (UID: \"ae4ad2bd-aeff-4812-b171-7630319ad71e\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-59xp7" Dec 13 03:52:12 crc kubenswrapper[5070]: I1213 03:52:12.977866 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/ae4ad2bd-aeff-4812-b171-7630319ad71e-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-59xp7\" (UID: \"ae4ad2bd-aeff-4812-b171-7630319ad71e\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-59xp7" Dec 13 03:52:12 crc kubenswrapper[5070]: I1213 03:52:12.977916 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ae4ad2bd-aeff-4812-b171-7630319ad71e-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-59xp7\" (UID: \"ae4ad2bd-aeff-4812-b171-7630319ad71e\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-59xp7" Dec 13 03:52:12 crc kubenswrapper[5070]: I1213 03:52:12.977944 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/ae4ad2bd-aeff-4812-b171-7630319ad71e-ceph\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-59xp7\" (UID: \"ae4ad2bd-aeff-4812-b171-7630319ad71e\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-59xp7" Dec 13 03:52:12 crc kubenswrapper[5070]: I1213 03:52:12.977960 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae4ad2bd-aeff-4812-b171-7630319ad71e-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-59xp7\" (UID: \"ae4ad2bd-aeff-4812-b171-7630319ad71e\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-59xp7" Dec 13 03:52:12 crc kubenswrapper[5070]: I1213 03:52:12.979011 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/ae4ad2bd-aeff-4812-b171-7630319ad71e-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-59xp7\" (UID: \"ae4ad2bd-aeff-4812-b171-7630319ad71e\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-59xp7" Dec 13 03:52:12 crc kubenswrapper[5070]: I1213 03:52:12.982865 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: 
\"kubernetes.io/secret/ae4ad2bd-aeff-4812-b171-7630319ad71e-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-59xp7\" (UID: \"ae4ad2bd-aeff-4812-b171-7630319ad71e\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-59xp7" Dec 13 03:52:12 crc kubenswrapper[5070]: I1213 03:52:12.984120 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ae4ad2bd-aeff-4812-b171-7630319ad71e-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-59xp7\" (UID: \"ae4ad2bd-aeff-4812-b171-7630319ad71e\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-59xp7" Dec 13 03:52:12 crc kubenswrapper[5070]: I1213 03:52:12.985648 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae4ad2bd-aeff-4812-b171-7630319ad71e-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-59xp7\" (UID: \"ae4ad2bd-aeff-4812-b171-7630319ad71e\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-59xp7" Dec 13 03:52:12 crc kubenswrapper[5070]: I1213 03:52:12.988249 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/ae4ad2bd-aeff-4812-b171-7630319ad71e-ceph\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-59xp7\" (UID: \"ae4ad2bd-aeff-4812-b171-7630319ad71e\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-59xp7" Dec 13 03:52:12 crc kubenswrapper[5070]: I1213 03:52:12.998704 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z6456\" (UniqueName: \"kubernetes.io/projected/ae4ad2bd-aeff-4812-b171-7630319ad71e-kube-api-access-z6456\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-59xp7\" (UID: \"ae4ad2bd-aeff-4812-b171-7630319ad71e\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-59xp7" Dec 13 03:52:13 crc kubenswrapper[5070]: I1213 03:52:13.059636 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-59xp7" Dec 13 03:52:13 crc kubenswrapper[5070]: I1213 03:52:13.582224 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-59xp7"] Dec 13 03:52:13 crc kubenswrapper[5070]: I1213 03:52:13.665359 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-59xp7" event={"ID":"ae4ad2bd-aeff-4812-b171-7630319ad71e","Type":"ContainerStarted","Data":"701bce3e615543c7eef7bc79c997b32af940d309d1b39cd859b49c9f8206ee39"} Dec 13 03:52:14 crc kubenswrapper[5070]: I1213 03:52:14.674698 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-59xp7" event={"ID":"ae4ad2bd-aeff-4812-b171-7630319ad71e","Type":"ContainerStarted","Data":"9b9837c49817650e64c96e22d33df06b0b6f252aad448ac2f1126adc73d36736"} Dec 13 03:52:14 crc kubenswrapper[5070]: I1213 03:52:14.694712 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-59xp7" podStartSLOduration=1.951471507 podStartE2EDuration="2.694694712s" podCreationTimestamp="2025-12-13 03:52:12 +0000 UTC" firstStartedPulling="2025-12-13 03:52:13.589396971 +0000 UTC m=+2425.825240517" lastFinishedPulling="2025-12-13 03:52:14.332620156 +0000 UTC m=+2426.568463722" observedRunningTime="2025-12-13 03:52:14.693240112 +0000 UTC m=+2426.929083678" watchObservedRunningTime="2025-12-13 03:52:14.694694712 +0000 UTC m=+2426.930538258" Dec 13 03:52:21 crc kubenswrapper[5070]: I1213 03:52:21.942981 5070 patch_prober.go:28] interesting pod/machine-config-daemon-9l4rb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 13 03:52:21 crc kubenswrapper[5070]: I1213 03:52:21.943523 5070 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 13 03:52:21 crc kubenswrapper[5070]: I1213 03:52:21.943573 5070 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" Dec 13 03:52:21 crc kubenswrapper[5070]: I1213 03:52:21.944421 5070 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"56c86f8149d0aae997cc460a6956ca1f0ca99356e791ef266d0035921c2b2a17"} pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 13 03:52:21 crc kubenswrapper[5070]: I1213 03:52:21.944598 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" containerName="machine-config-daemon" containerID="cri-o://56c86f8149d0aae997cc460a6956ca1f0ca99356e791ef266d0035921c2b2a17" gracePeriod=600 Dec 13 03:52:22 crc kubenswrapper[5070]: E1213 03:52:22.075943 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with 
CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 03:52:22 crc kubenswrapper[5070]: I1213 03:52:22.744678 5070 generic.go:334] "Generic (PLEG): container finished" podID="a2e447c7-5901-414f-af96-69441d4750db" containerID="56c86f8149d0aae997cc460a6956ca1f0ca99356e791ef266d0035921c2b2a17" exitCode=0 Dec 13 03:52:22 crc kubenswrapper[5070]: I1213 03:52:22.744889 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" event={"ID":"a2e447c7-5901-414f-af96-69441d4750db","Type":"ContainerDied","Data":"56c86f8149d0aae997cc460a6956ca1f0ca99356e791ef266d0035921c2b2a17"} Dec 13 03:52:22 crc kubenswrapper[5070]: I1213 03:52:22.745032 5070 scope.go:117] "RemoveContainer" containerID="780d0740a03317cdcccec08fe9d993d16c77a58a86849080dfe009ba45672bf2" Dec 13 03:52:22 crc kubenswrapper[5070]: I1213 03:52:22.746088 5070 scope.go:117] "RemoveContainer" containerID="56c86f8149d0aae997cc460a6956ca1f0ca99356e791ef266d0035921c2b2a17" Dec 13 03:52:22 crc kubenswrapper[5070]: E1213 03:52:22.746361 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 03:52:35 crc kubenswrapper[5070]: I1213 03:52:35.167167 5070 scope.go:117] "RemoveContainer" containerID="56c86f8149d0aae997cc460a6956ca1f0ca99356e791ef266d0035921c2b2a17" Dec 13 03:52:35 crc kubenswrapper[5070]: E1213 03:52:35.167903 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 03:52:50 crc kubenswrapper[5070]: I1213 03:52:50.167152 5070 scope.go:117] "RemoveContainer" containerID="56c86f8149d0aae997cc460a6956ca1f0ca99356e791ef266d0035921c2b2a17" Dec 13 03:52:50 crc kubenswrapper[5070]: E1213 03:52:50.167977 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 03:53:04 crc kubenswrapper[5070]: I1213 03:53:04.167485 5070 scope.go:117] "RemoveContainer" containerID="56c86f8149d0aae997cc460a6956ca1f0ca99356e791ef266d0035921c2b2a17" Dec 13 03:53:04 crc kubenswrapper[5070]: E1213 03:53:04.168109 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 03:53:18 crc kubenswrapper[5070]: I1213 03:53:18.173055 5070 scope.go:117] "RemoveContainer" containerID="56c86f8149d0aae997cc460a6956ca1f0ca99356e791ef266d0035921c2b2a17" Dec 13 03:53:18 crc kubenswrapper[5070]: E1213 03:53:18.174298 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 03:53:28 crc kubenswrapper[5070]: I1213 03:53:28.322875 5070 generic.go:334] "Generic (PLEG): container finished" podID="ae4ad2bd-aeff-4812-b171-7630319ad71e" containerID="9b9837c49817650e64c96e22d33df06b0b6f252aad448ac2f1126adc73d36736" exitCode=0 Dec 13 03:53:28 crc kubenswrapper[5070]: I1213 03:53:28.322949 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-59xp7" event={"ID":"ae4ad2bd-aeff-4812-b171-7630319ad71e","Type":"ContainerDied","Data":"9b9837c49817650e64c96e22d33df06b0b6f252aad448ac2f1126adc73d36736"} Dec 13 03:53:29 crc kubenswrapper[5070]: I1213 03:53:29.813120 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-59xp7" Dec 13 03:53:29 crc kubenswrapper[5070]: I1213 03:53:29.888102 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ae4ad2bd-aeff-4812-b171-7630319ad71e-ssh-key\") pod \"ae4ad2bd-aeff-4812-b171-7630319ad71e\" (UID: \"ae4ad2bd-aeff-4812-b171-7630319ad71e\") " Dec 13 03:53:29 crc kubenswrapper[5070]: I1213 03:53:29.888188 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae4ad2bd-aeff-4812-b171-7630319ad71e-ovn-combined-ca-bundle\") pod \"ae4ad2bd-aeff-4812-b171-7630319ad71e\" (UID: \"ae4ad2bd-aeff-4812-b171-7630319ad71e\") " Dec 13 03:53:29 crc kubenswrapper[5070]: I1213 03:53:29.888391 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z6456\" (UniqueName: \"kubernetes.io/projected/ae4ad2bd-aeff-4812-b171-7630319ad71e-kube-api-access-z6456\") pod \"ae4ad2bd-aeff-4812-b171-7630319ad71e\" (UID: \"ae4ad2bd-aeff-4812-b171-7630319ad71e\") " Dec 13 03:53:29 crc kubenswrapper[5070]: I1213 03:53:29.888479 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ae4ad2bd-aeff-4812-b171-7630319ad71e-inventory\") pod \"ae4ad2bd-aeff-4812-b171-7630319ad71e\" (UID: \"ae4ad2bd-aeff-4812-b171-7630319ad71e\") " Dec 13 03:53:29 crc kubenswrapper[5070]: I1213 03:53:29.888536 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/ae4ad2bd-aeff-4812-b171-7630319ad71e-ovncontroller-config-0\") pod \"ae4ad2bd-aeff-4812-b171-7630319ad71e\" (UID: \"ae4ad2bd-aeff-4812-b171-7630319ad71e\") " Dec 13 03:53:29 crc kubenswrapper[5070]: I1213 03:53:29.888584 5070 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/ae4ad2bd-aeff-4812-b171-7630319ad71e-ceph\") pod \"ae4ad2bd-aeff-4812-b171-7630319ad71e\" (UID: \"ae4ad2bd-aeff-4812-b171-7630319ad71e\") " Dec 13 03:53:29 crc kubenswrapper[5070]: I1213 03:53:29.894815 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ae4ad2bd-aeff-4812-b171-7630319ad71e-kube-api-access-z6456" (OuterVolumeSpecName: "kube-api-access-z6456") pod "ae4ad2bd-aeff-4812-b171-7630319ad71e" (UID: "ae4ad2bd-aeff-4812-b171-7630319ad71e"). InnerVolumeSpecName "kube-api-access-z6456". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:53:29 crc kubenswrapper[5070]: I1213 03:53:29.895033 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ae4ad2bd-aeff-4812-b171-7630319ad71e-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "ae4ad2bd-aeff-4812-b171-7630319ad71e" (UID: "ae4ad2bd-aeff-4812-b171-7630319ad71e"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:53:29 crc kubenswrapper[5070]: I1213 03:53:29.896123 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ae4ad2bd-aeff-4812-b171-7630319ad71e-ceph" (OuterVolumeSpecName: "ceph") pod "ae4ad2bd-aeff-4812-b171-7630319ad71e" (UID: "ae4ad2bd-aeff-4812-b171-7630319ad71e"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:53:29 crc kubenswrapper[5070]: I1213 03:53:29.915540 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ae4ad2bd-aeff-4812-b171-7630319ad71e-ovncontroller-config-0" (OuterVolumeSpecName: "ovncontroller-config-0") pod "ae4ad2bd-aeff-4812-b171-7630319ad71e" (UID: "ae4ad2bd-aeff-4812-b171-7630319ad71e"). InnerVolumeSpecName "ovncontroller-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 03:53:29 crc kubenswrapper[5070]: I1213 03:53:29.915762 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ae4ad2bd-aeff-4812-b171-7630319ad71e-inventory" (OuterVolumeSpecName: "inventory") pod "ae4ad2bd-aeff-4812-b171-7630319ad71e" (UID: "ae4ad2bd-aeff-4812-b171-7630319ad71e"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:53:29 crc kubenswrapper[5070]: I1213 03:53:29.926737 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ae4ad2bd-aeff-4812-b171-7630319ad71e-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "ae4ad2bd-aeff-4812-b171-7630319ad71e" (UID: "ae4ad2bd-aeff-4812-b171-7630319ad71e"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:53:29 crc kubenswrapper[5070]: I1213 03:53:29.990953 5070 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/ae4ad2bd-aeff-4812-b171-7630319ad71e-ceph\") on node \"crc\" DevicePath \"\"" Dec 13 03:53:29 crc kubenswrapper[5070]: I1213 03:53:29.990990 5070 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ae4ad2bd-aeff-4812-b171-7630319ad71e-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 13 03:53:29 crc kubenswrapper[5070]: I1213 03:53:29.991002 5070 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae4ad2bd-aeff-4812-b171-7630319ad71e-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 03:53:29 crc kubenswrapper[5070]: I1213 03:53:29.991014 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z6456\" (UniqueName: \"kubernetes.io/projected/ae4ad2bd-aeff-4812-b171-7630319ad71e-kube-api-access-z6456\") on node \"crc\" DevicePath \"\"" Dec 13 03:53:29 crc kubenswrapper[5070]: I1213 03:53:29.991024 5070 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ae4ad2bd-aeff-4812-b171-7630319ad71e-inventory\") on node \"crc\" DevicePath \"\"" Dec 13 03:53:29 crc kubenswrapper[5070]: I1213 03:53:29.991033 5070 reconciler_common.go:293] "Volume detached for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/ae4ad2bd-aeff-4812-b171-7630319ad71e-ovncontroller-config-0\") on node \"crc\" DevicePath \"\"" Dec 13 03:53:30 crc kubenswrapper[5070]: I1213 03:53:30.345037 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-59xp7" event={"ID":"ae4ad2bd-aeff-4812-b171-7630319ad71e","Type":"ContainerDied","Data":"701bce3e615543c7eef7bc79c997b32af940d309d1b39cd859b49c9f8206ee39"} Dec 13 03:53:30 crc kubenswrapper[5070]: I1213 03:53:30.345082 5070 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="701bce3e615543c7eef7bc79c997b32af940d309d1b39cd859b49c9f8206ee39" Dec 13 03:53:30 crc kubenswrapper[5070]: I1213 03:53:30.345112 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-59xp7" Dec 13 03:53:30 crc kubenswrapper[5070]: I1213 03:53:30.452128 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-v4vt4"] Dec 13 03:53:30 crc kubenswrapper[5070]: E1213 03:53:30.452582 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ae4ad2bd-aeff-4812-b171-7630319ad71e" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Dec 13 03:53:30 crc kubenswrapper[5070]: I1213 03:53:30.452604 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="ae4ad2bd-aeff-4812-b171-7630319ad71e" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Dec 13 03:53:30 crc kubenswrapper[5070]: I1213 03:53:30.452781 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="ae4ad2bd-aeff-4812-b171-7630319ad71e" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Dec 13 03:53:30 crc kubenswrapper[5070]: I1213 03:53:30.453434 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-v4vt4" Dec 13 03:53:30 crc kubenswrapper[5070]: I1213 03:53:30.459050 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Dec 13 03:53:30 crc kubenswrapper[5070]: I1213 03:53:30.459378 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-ovn-metadata-agent-neutron-config" Dec 13 03:53:30 crc kubenswrapper[5070]: I1213 03:53:30.459489 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-neutron-config" Dec 13 03:53:30 crc kubenswrapper[5070]: I1213 03:53:30.459529 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 13 03:53:30 crc kubenswrapper[5070]: I1213 03:53:30.459652 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 13 03:53:30 crc kubenswrapper[5070]: I1213 03:53:30.460143 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-gdkcc" Dec 13 03:53:30 crc kubenswrapper[5070]: I1213 03:53:30.460409 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 13 03:53:30 crc kubenswrapper[5070]: I1213 03:53:30.472209 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-v4vt4"] Dec 13 03:53:30 crc kubenswrapper[5070]: I1213 03:53:30.602501 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/c87c7a56-123d-47b8-8e94-245995b89e61-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-v4vt4\" (UID: \"c87c7a56-123d-47b8-8e94-245995b89e61\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-v4vt4" Dec 13 03:53:30 crc kubenswrapper[5070]: I1213 03:53:30.602920 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/c87c7a56-123d-47b8-8e94-245995b89e61-ceph\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-v4vt4\" (UID: \"c87c7a56-123d-47b8-8e94-245995b89e61\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-v4vt4" Dec 13 03:53:30 crc kubenswrapper[5070]: I1213 03:53:30.603162 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c87c7a56-123d-47b8-8e94-245995b89e61-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-v4vt4\" (UID: \"c87c7a56-123d-47b8-8e94-245995b89e61\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-v4vt4" Dec 13 03:53:30 crc kubenswrapper[5070]: I1213 03:53:30.603232 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r2f4k\" (UniqueName: \"kubernetes.io/projected/c87c7a56-123d-47b8-8e94-245995b89e61-kube-api-access-r2f4k\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-v4vt4\" (UID: \"c87c7a56-123d-47b8-8e94-245995b89e61\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-v4vt4" Dec 13 03:53:30 crc kubenswrapper[5070]: I1213 03:53:30.603359 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c87c7a56-123d-47b8-8e94-245995b89e61-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-v4vt4\" (UID: \"c87c7a56-123d-47b8-8e94-245995b89e61\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-v4vt4" Dec 13 03:53:30 crc kubenswrapper[5070]: I1213 03:53:30.603542 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/c87c7a56-123d-47b8-8e94-245995b89e61-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-v4vt4\" (UID: \"c87c7a56-123d-47b8-8e94-245995b89e61\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-v4vt4" Dec 13 03:53:30 crc kubenswrapper[5070]: I1213 03:53:30.603682 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c87c7a56-123d-47b8-8e94-245995b89e61-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-v4vt4\" (UID: \"c87c7a56-123d-47b8-8e94-245995b89e61\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-v4vt4" Dec 13 03:53:30 crc kubenswrapper[5070]: I1213 03:53:30.706017 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/c87c7a56-123d-47b8-8e94-245995b89e61-ceph\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-v4vt4\" (UID: \"c87c7a56-123d-47b8-8e94-245995b89e61\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-v4vt4" Dec 13 03:53:30 crc kubenswrapper[5070]: I1213 03:53:30.706406 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c87c7a56-123d-47b8-8e94-245995b89e61-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-v4vt4\" (UID: \"c87c7a56-123d-47b8-8e94-245995b89e61\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-v4vt4" Dec 13 03:53:30 crc kubenswrapper[5070]: I1213 03:53:30.706556 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r2f4k\" (UniqueName: \"kubernetes.io/projected/c87c7a56-123d-47b8-8e94-245995b89e61-kube-api-access-r2f4k\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-v4vt4\" (UID: \"c87c7a56-123d-47b8-8e94-245995b89e61\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-v4vt4" Dec 13 03:53:30 crc kubenswrapper[5070]: I1213 03:53:30.706679 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c87c7a56-123d-47b8-8e94-245995b89e61-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-v4vt4\" (UID: \"c87c7a56-123d-47b8-8e94-245995b89e61\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-v4vt4" Dec 13 03:53:30 crc kubenswrapper[5070]: I1213 03:53:30.706868 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/c87c7a56-123d-47b8-8e94-245995b89e61-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-v4vt4\" (UID: 
\"c87c7a56-123d-47b8-8e94-245995b89e61\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-v4vt4" Dec 13 03:53:30 crc kubenswrapper[5070]: I1213 03:53:30.707065 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c87c7a56-123d-47b8-8e94-245995b89e61-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-v4vt4\" (UID: \"c87c7a56-123d-47b8-8e94-245995b89e61\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-v4vt4" Dec 13 03:53:30 crc kubenswrapper[5070]: I1213 03:53:30.707486 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/c87c7a56-123d-47b8-8e94-245995b89e61-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-v4vt4\" (UID: \"c87c7a56-123d-47b8-8e94-245995b89e61\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-v4vt4" Dec 13 03:53:30 crc kubenswrapper[5070]: I1213 03:53:30.712039 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c87c7a56-123d-47b8-8e94-245995b89e61-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-v4vt4\" (UID: \"c87c7a56-123d-47b8-8e94-245995b89e61\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-v4vt4" Dec 13 03:53:30 crc kubenswrapper[5070]: I1213 03:53:30.712163 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c87c7a56-123d-47b8-8e94-245995b89e61-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-v4vt4\" (UID: \"c87c7a56-123d-47b8-8e94-245995b89e61\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-v4vt4" Dec 13 03:53:30 crc kubenswrapper[5070]: I1213 03:53:30.712502 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/c87c7a56-123d-47b8-8e94-245995b89e61-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-v4vt4\" (UID: \"c87c7a56-123d-47b8-8e94-245995b89e61\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-v4vt4" Dec 13 03:53:30 crc kubenswrapper[5070]: I1213 03:53:30.712992 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/c87c7a56-123d-47b8-8e94-245995b89e61-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-v4vt4\" (UID: \"c87c7a56-123d-47b8-8e94-245995b89e61\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-v4vt4" Dec 13 03:53:30 crc kubenswrapper[5070]: I1213 03:53:30.714010 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/c87c7a56-123d-47b8-8e94-245995b89e61-ceph\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-v4vt4\" (UID: \"c87c7a56-123d-47b8-8e94-245995b89e61\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-v4vt4" Dec 13 03:53:30 crc kubenswrapper[5070]: I1213 03:53:30.714419 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/c87c7a56-123d-47b8-8e94-245995b89e61-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-v4vt4\" (UID: \"c87c7a56-123d-47b8-8e94-245995b89e61\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-v4vt4" Dec 13 03:53:30 crc kubenswrapper[5070]: I1213 03:53:30.728653 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r2f4k\" (UniqueName: \"kubernetes.io/projected/c87c7a56-123d-47b8-8e94-245995b89e61-kube-api-access-r2f4k\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-v4vt4\" (UID: \"c87c7a56-123d-47b8-8e94-245995b89e61\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-v4vt4" Dec 13 03:53:30 crc kubenswrapper[5070]: I1213 03:53:30.770946 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-v4vt4" Dec 13 03:53:31 crc kubenswrapper[5070]: I1213 03:53:31.312680 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-v4vt4"] Dec 13 03:53:31 crc kubenswrapper[5070]: I1213 03:53:31.356883 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-v4vt4" event={"ID":"c87c7a56-123d-47b8-8e94-245995b89e61","Type":"ContainerStarted","Data":"3a560ee7100f90b07cf828192b7aa8bb3a18e573fbff2cd1a2caecd125d06de7"} Dec 13 03:53:32 crc kubenswrapper[5070]: I1213 03:53:32.367051 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-v4vt4" event={"ID":"c87c7a56-123d-47b8-8e94-245995b89e61","Type":"ContainerStarted","Data":"27072baaacf84fff2aae4397aa7c57b24aef6edd5c33b86ec5f221f7ca838789"} Dec 13 03:53:32 crc kubenswrapper[5070]: I1213 03:53:32.386753 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-v4vt4" podStartSLOduration=1.857896227 podStartE2EDuration="2.386724395s" podCreationTimestamp="2025-12-13 03:53:30 +0000 UTC" firstStartedPulling="2025-12-13 03:53:31.317396339 +0000 UTC m=+2503.553239885" lastFinishedPulling="2025-12-13 03:53:31.846224507 +0000 UTC m=+2504.082068053" observedRunningTime="2025-12-13 03:53:32.383822345 +0000 UTC m=+2504.619665891" watchObservedRunningTime="2025-12-13 03:53:32.386724395 +0000 UTC m=+2504.622567941" Dec 13 03:53:33 crc kubenswrapper[5070]: I1213 03:53:33.167212 5070 scope.go:117] "RemoveContainer" containerID="56c86f8149d0aae997cc460a6956ca1f0ca99356e791ef266d0035921c2b2a17" Dec 13 03:53:33 crc kubenswrapper[5070]: E1213 03:53:33.167528 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 03:53:46 crc kubenswrapper[5070]: I1213 03:53:46.167377 5070 scope.go:117] "RemoveContainer" containerID="56c86f8149d0aae997cc460a6956ca1f0ca99356e791ef266d0035921c2b2a17" Dec 13 03:53:46 crc kubenswrapper[5070]: E1213 03:53:46.168100 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with 
CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 03:53:57 crc kubenswrapper[5070]: I1213 03:53:57.167676 5070 scope.go:117] "RemoveContainer" containerID="56c86f8149d0aae997cc460a6956ca1f0ca99356e791ef266d0035921c2b2a17" Dec 13 03:53:57 crc kubenswrapper[5070]: E1213 03:53:57.168556 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 03:54:08 crc kubenswrapper[5070]: I1213 03:54:08.172348 5070 scope.go:117] "RemoveContainer" containerID="56c86f8149d0aae997cc460a6956ca1f0ca99356e791ef266d0035921c2b2a17" Dec 13 03:54:08 crc kubenswrapper[5070]: E1213 03:54:08.173132 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 03:54:16 crc kubenswrapper[5070]: I1213 03:54:16.105270 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-2wwlv"] Dec 13 03:54:16 crc kubenswrapper[5070]: I1213 03:54:16.107998 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-2wwlv" Dec 13 03:54:16 crc kubenswrapper[5070]: I1213 03:54:16.121180 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-2wwlv"] Dec 13 03:54:16 crc kubenswrapper[5070]: I1213 03:54:16.158791 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d6ebbd63-9cd1-4de0-a71e-5f5b80be68b1-utilities\") pod \"community-operators-2wwlv\" (UID: \"d6ebbd63-9cd1-4de0-a71e-5f5b80be68b1\") " pod="openshift-marketplace/community-operators-2wwlv" Dec 13 03:54:16 crc kubenswrapper[5070]: I1213 03:54:16.158879 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d6ebbd63-9cd1-4de0-a71e-5f5b80be68b1-catalog-content\") pod \"community-operators-2wwlv\" (UID: \"d6ebbd63-9cd1-4de0-a71e-5f5b80be68b1\") " pod="openshift-marketplace/community-operators-2wwlv" Dec 13 03:54:16 crc kubenswrapper[5070]: I1213 03:54:16.158959 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ndh9b\" (UniqueName: \"kubernetes.io/projected/d6ebbd63-9cd1-4de0-a71e-5f5b80be68b1-kube-api-access-ndh9b\") pod \"community-operators-2wwlv\" (UID: \"d6ebbd63-9cd1-4de0-a71e-5f5b80be68b1\") " pod="openshift-marketplace/community-operators-2wwlv" Dec 13 03:54:16 crc kubenswrapper[5070]: I1213 03:54:16.261711 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d6ebbd63-9cd1-4de0-a71e-5f5b80be68b1-utilities\") pod \"community-operators-2wwlv\" (UID: \"d6ebbd63-9cd1-4de0-a71e-5f5b80be68b1\") " pod="openshift-marketplace/community-operators-2wwlv" Dec 13 03:54:16 crc kubenswrapper[5070]: I1213 03:54:16.261867 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d6ebbd63-9cd1-4de0-a71e-5f5b80be68b1-utilities\") pod \"community-operators-2wwlv\" (UID: \"d6ebbd63-9cd1-4de0-a71e-5f5b80be68b1\") " pod="openshift-marketplace/community-operators-2wwlv" Dec 13 03:54:16 crc kubenswrapper[5070]: I1213 03:54:16.262148 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d6ebbd63-9cd1-4de0-a71e-5f5b80be68b1-catalog-content\") pod \"community-operators-2wwlv\" (UID: \"d6ebbd63-9cd1-4de0-a71e-5f5b80be68b1\") " pod="openshift-marketplace/community-operators-2wwlv" Dec 13 03:54:16 crc kubenswrapper[5070]: I1213 03:54:16.262328 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ndh9b\" (UniqueName: \"kubernetes.io/projected/d6ebbd63-9cd1-4de0-a71e-5f5b80be68b1-kube-api-access-ndh9b\") pod \"community-operators-2wwlv\" (UID: \"d6ebbd63-9cd1-4de0-a71e-5f5b80be68b1\") " pod="openshift-marketplace/community-operators-2wwlv" Dec 13 03:54:16 crc kubenswrapper[5070]: I1213 03:54:16.262661 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d6ebbd63-9cd1-4de0-a71e-5f5b80be68b1-catalog-content\") pod \"community-operators-2wwlv\" (UID: \"d6ebbd63-9cd1-4de0-a71e-5f5b80be68b1\") " pod="openshift-marketplace/community-operators-2wwlv" Dec 13 03:54:16 crc kubenswrapper[5070]: I1213 03:54:16.288692 5070 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-ndh9b\" (UniqueName: \"kubernetes.io/projected/d6ebbd63-9cd1-4de0-a71e-5f5b80be68b1-kube-api-access-ndh9b\") pod \"community-operators-2wwlv\" (UID: \"d6ebbd63-9cd1-4de0-a71e-5f5b80be68b1\") " pod="openshift-marketplace/community-operators-2wwlv" Dec 13 03:54:16 crc kubenswrapper[5070]: I1213 03:54:16.430985 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-2wwlv" Dec 13 03:54:16 crc kubenswrapper[5070]: I1213 03:54:16.973164 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-2wwlv"] Dec 13 03:54:17 crc kubenswrapper[5070]: I1213 03:54:17.769173 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2wwlv" event={"ID":"d6ebbd63-9cd1-4de0-a71e-5f5b80be68b1","Type":"ContainerStarted","Data":"656317df0a70b0c177f1603c0e629a9fdd205e56ee8193738313bc3b978f3ea8"} Dec 13 03:54:19 crc kubenswrapper[5070]: I1213 03:54:19.794415 5070 generic.go:334] "Generic (PLEG): container finished" podID="d6ebbd63-9cd1-4de0-a71e-5f5b80be68b1" containerID="d102814b9c8b859e78464912500c1f22df98892a36b5c6adb9ea9e0015d9d0e3" exitCode=0 Dec 13 03:54:19 crc kubenswrapper[5070]: I1213 03:54:19.794500 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2wwlv" event={"ID":"d6ebbd63-9cd1-4de0-a71e-5f5b80be68b1","Type":"ContainerDied","Data":"d102814b9c8b859e78464912500c1f22df98892a36b5c6adb9ea9e0015d9d0e3"} Dec 13 03:54:19 crc kubenswrapper[5070]: I1213 03:54:19.798440 5070 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 13 03:54:20 crc kubenswrapper[5070]: I1213 03:54:20.166967 5070 scope.go:117] "RemoveContainer" containerID="56c86f8149d0aae997cc460a6956ca1f0ca99356e791ef266d0035921c2b2a17" Dec 13 03:54:20 crc kubenswrapper[5070]: E1213 03:54:20.167270 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 03:54:20 crc kubenswrapper[5070]: I1213 03:54:20.804260 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2wwlv" event={"ID":"d6ebbd63-9cd1-4de0-a71e-5f5b80be68b1","Type":"ContainerStarted","Data":"d90d417f863aadaed08307a84a7d86522782acabb8446abb4522921034264b0d"} Dec 13 03:54:21 crc kubenswrapper[5070]: I1213 03:54:21.812845 5070 generic.go:334] "Generic (PLEG): container finished" podID="d6ebbd63-9cd1-4de0-a71e-5f5b80be68b1" containerID="d90d417f863aadaed08307a84a7d86522782acabb8446abb4522921034264b0d" exitCode=0 Dec 13 03:54:21 crc kubenswrapper[5070]: I1213 03:54:21.812898 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2wwlv" event={"ID":"d6ebbd63-9cd1-4de0-a71e-5f5b80be68b1","Type":"ContainerDied","Data":"d90d417f863aadaed08307a84a7d86522782acabb8446abb4522921034264b0d"} Dec 13 03:54:22 crc kubenswrapper[5070]: I1213 03:54:22.830100 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2wwlv" 
event={"ID":"d6ebbd63-9cd1-4de0-a71e-5f5b80be68b1","Type":"ContainerStarted","Data":"aa26b5fad510701aa4eb81aa2b2fb712713367b52e8235138bb317dcb881c8f7"} Dec 13 03:54:22 crc kubenswrapper[5070]: I1213 03:54:22.853979 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-2wwlv" podStartSLOduration=4.359580402 podStartE2EDuration="6.853962568s" podCreationTimestamp="2025-12-13 03:54:16 +0000 UTC" firstStartedPulling="2025-12-13 03:54:19.798207424 +0000 UTC m=+2552.034050970" lastFinishedPulling="2025-12-13 03:54:22.29258959 +0000 UTC m=+2554.528433136" observedRunningTime="2025-12-13 03:54:22.851311266 +0000 UTC m=+2555.087154812" watchObservedRunningTime="2025-12-13 03:54:22.853962568 +0000 UTC m=+2555.089806114" Dec 13 03:54:26 crc kubenswrapper[5070]: I1213 03:54:26.431134 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-2wwlv" Dec 13 03:54:26 crc kubenswrapper[5070]: I1213 03:54:26.431776 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-2wwlv" Dec 13 03:54:26 crc kubenswrapper[5070]: I1213 03:54:26.480995 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-2wwlv" Dec 13 03:54:32 crc kubenswrapper[5070]: I1213 03:54:32.168327 5070 scope.go:117] "RemoveContainer" containerID="56c86f8149d0aae997cc460a6956ca1f0ca99356e791ef266d0035921c2b2a17" Dec 13 03:54:32 crc kubenswrapper[5070]: E1213 03:54:32.169079 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 03:54:33 crc kubenswrapper[5070]: I1213 03:54:33.934736 5070 generic.go:334] "Generic (PLEG): container finished" podID="c87c7a56-123d-47b8-8e94-245995b89e61" containerID="27072baaacf84fff2aae4397aa7c57b24aef6edd5c33b86ec5f221f7ca838789" exitCode=0 Dec 13 03:54:33 crc kubenswrapper[5070]: I1213 03:54:33.935055 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-v4vt4" event={"ID":"c87c7a56-123d-47b8-8e94-245995b89e61","Type":"ContainerDied","Data":"27072baaacf84fff2aae4397aa7c57b24aef6edd5c33b86ec5f221f7ca838789"} Dec 13 03:54:35 crc kubenswrapper[5070]: I1213 03:54:35.339900 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-v4vt4" Dec 13 03:54:35 crc kubenswrapper[5070]: I1213 03:54:35.366868 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c87c7a56-123d-47b8-8e94-245995b89e61-inventory\") pod \"c87c7a56-123d-47b8-8e94-245995b89e61\" (UID: \"c87c7a56-123d-47b8-8e94-245995b89e61\") " Dec 13 03:54:35 crc kubenswrapper[5070]: I1213 03:54:35.366943 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/c87c7a56-123d-47b8-8e94-245995b89e61-nova-metadata-neutron-config-0\") pod \"c87c7a56-123d-47b8-8e94-245995b89e61\" (UID: \"c87c7a56-123d-47b8-8e94-245995b89e61\") " Dec 13 03:54:35 crc kubenswrapper[5070]: I1213 03:54:35.366968 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/c87c7a56-123d-47b8-8e94-245995b89e61-ceph\") pod \"c87c7a56-123d-47b8-8e94-245995b89e61\" (UID: \"c87c7a56-123d-47b8-8e94-245995b89e61\") " Dec 13 03:54:35 crc kubenswrapper[5070]: I1213 03:54:35.367737 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r2f4k\" (UniqueName: \"kubernetes.io/projected/c87c7a56-123d-47b8-8e94-245995b89e61-kube-api-access-r2f4k\") pod \"c87c7a56-123d-47b8-8e94-245995b89e61\" (UID: \"c87c7a56-123d-47b8-8e94-245995b89e61\") " Dec 13 03:54:35 crc kubenswrapper[5070]: I1213 03:54:35.367784 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c87c7a56-123d-47b8-8e94-245995b89e61-neutron-metadata-combined-ca-bundle\") pod \"c87c7a56-123d-47b8-8e94-245995b89e61\" (UID: \"c87c7a56-123d-47b8-8e94-245995b89e61\") " Dec 13 03:54:35 crc kubenswrapper[5070]: I1213 03:54:35.367890 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c87c7a56-123d-47b8-8e94-245995b89e61-ssh-key\") pod \"c87c7a56-123d-47b8-8e94-245995b89e61\" (UID: \"c87c7a56-123d-47b8-8e94-245995b89e61\") " Dec 13 03:54:35 crc kubenswrapper[5070]: I1213 03:54:35.367923 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/c87c7a56-123d-47b8-8e94-245995b89e61-neutron-ovn-metadata-agent-neutron-config-0\") pod \"c87c7a56-123d-47b8-8e94-245995b89e61\" (UID: \"c87c7a56-123d-47b8-8e94-245995b89e61\") " Dec 13 03:54:35 crc kubenswrapper[5070]: I1213 03:54:35.373326 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c87c7a56-123d-47b8-8e94-245995b89e61-kube-api-access-r2f4k" (OuterVolumeSpecName: "kube-api-access-r2f4k") pod "c87c7a56-123d-47b8-8e94-245995b89e61" (UID: "c87c7a56-123d-47b8-8e94-245995b89e61"). InnerVolumeSpecName "kube-api-access-r2f4k". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:54:35 crc kubenswrapper[5070]: I1213 03:54:35.373357 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c87c7a56-123d-47b8-8e94-245995b89e61-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "c87c7a56-123d-47b8-8e94-245995b89e61" (UID: "c87c7a56-123d-47b8-8e94-245995b89e61"). 
InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:54:35 crc kubenswrapper[5070]: I1213 03:54:35.374683 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c87c7a56-123d-47b8-8e94-245995b89e61-ceph" (OuterVolumeSpecName: "ceph") pod "c87c7a56-123d-47b8-8e94-245995b89e61" (UID: "c87c7a56-123d-47b8-8e94-245995b89e61"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:54:35 crc kubenswrapper[5070]: I1213 03:54:35.397538 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c87c7a56-123d-47b8-8e94-245995b89e61-nova-metadata-neutron-config-0" (OuterVolumeSpecName: "nova-metadata-neutron-config-0") pod "c87c7a56-123d-47b8-8e94-245995b89e61" (UID: "c87c7a56-123d-47b8-8e94-245995b89e61"). InnerVolumeSpecName "nova-metadata-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:54:35 crc kubenswrapper[5070]: I1213 03:54:35.400827 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c87c7a56-123d-47b8-8e94-245995b89e61-neutron-ovn-metadata-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-ovn-metadata-agent-neutron-config-0") pod "c87c7a56-123d-47b8-8e94-245995b89e61" (UID: "c87c7a56-123d-47b8-8e94-245995b89e61"). InnerVolumeSpecName "neutron-ovn-metadata-agent-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:54:35 crc kubenswrapper[5070]: I1213 03:54:35.401098 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c87c7a56-123d-47b8-8e94-245995b89e61-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "c87c7a56-123d-47b8-8e94-245995b89e61" (UID: "c87c7a56-123d-47b8-8e94-245995b89e61"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:54:35 crc kubenswrapper[5070]: I1213 03:54:35.401559 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c87c7a56-123d-47b8-8e94-245995b89e61-inventory" (OuterVolumeSpecName: "inventory") pod "c87c7a56-123d-47b8-8e94-245995b89e61" (UID: "c87c7a56-123d-47b8-8e94-245995b89e61"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:54:35 crc kubenswrapper[5070]: I1213 03:54:35.470521 5070 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c87c7a56-123d-47b8-8e94-245995b89e61-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 13 03:54:35 crc kubenswrapper[5070]: I1213 03:54:35.470567 5070 reconciler_common.go:293] "Volume detached for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/c87c7a56-123d-47b8-8e94-245995b89e61-neutron-ovn-metadata-agent-neutron-config-0\") on node \"crc\" DevicePath \"\"" Dec 13 03:54:35 crc kubenswrapper[5070]: I1213 03:54:35.470584 5070 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c87c7a56-123d-47b8-8e94-245995b89e61-inventory\") on node \"crc\" DevicePath \"\"" Dec 13 03:54:35 crc kubenswrapper[5070]: I1213 03:54:35.470597 5070 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/c87c7a56-123d-47b8-8e94-245995b89e61-nova-metadata-neutron-config-0\") on node \"crc\" DevicePath \"\"" Dec 13 03:54:35 crc kubenswrapper[5070]: I1213 03:54:35.470609 5070 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/c87c7a56-123d-47b8-8e94-245995b89e61-ceph\") on node \"crc\" DevicePath \"\"" Dec 13 03:54:35 crc kubenswrapper[5070]: I1213 03:54:35.470620 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r2f4k\" (UniqueName: \"kubernetes.io/projected/c87c7a56-123d-47b8-8e94-245995b89e61-kube-api-access-r2f4k\") on node \"crc\" DevicePath \"\"" Dec 13 03:54:35 crc kubenswrapper[5070]: I1213 03:54:35.470634 5070 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c87c7a56-123d-47b8-8e94-245995b89e61-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 03:54:35 crc kubenswrapper[5070]: I1213 03:54:35.953300 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-v4vt4" event={"ID":"c87c7a56-123d-47b8-8e94-245995b89e61","Type":"ContainerDied","Data":"3a560ee7100f90b07cf828192b7aa8bb3a18e573fbff2cd1a2caecd125d06de7"} Dec 13 03:54:35 crc kubenswrapper[5070]: I1213 03:54:35.953342 5070 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3a560ee7100f90b07cf828192b7aa8bb3a18e573fbff2cd1a2caecd125d06de7" Dec 13 03:54:35 crc kubenswrapper[5070]: I1213 03:54:35.953349 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-v4vt4" Dec 13 03:54:36 crc kubenswrapper[5070]: I1213 03:54:36.050595 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-ls75m"] Dec 13 03:54:36 crc kubenswrapper[5070]: E1213 03:54:36.051086 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c87c7a56-123d-47b8-8e94-245995b89e61" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Dec 13 03:54:36 crc kubenswrapper[5070]: I1213 03:54:36.051107 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="c87c7a56-123d-47b8-8e94-245995b89e61" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Dec 13 03:54:36 crc kubenswrapper[5070]: I1213 03:54:36.051354 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="c87c7a56-123d-47b8-8e94-245995b89e61" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Dec 13 03:54:36 crc kubenswrapper[5070]: I1213 03:54:36.052035 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-ls75m" Dec 13 03:54:36 crc kubenswrapper[5070]: I1213 03:54:36.054565 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Dec 13 03:54:36 crc kubenswrapper[5070]: I1213 03:54:36.054612 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 13 03:54:36 crc kubenswrapper[5070]: I1213 03:54:36.054841 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-gdkcc" Dec 13 03:54:36 crc kubenswrapper[5070]: I1213 03:54:36.054918 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 13 03:54:36 crc kubenswrapper[5070]: I1213 03:54:36.055045 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 13 03:54:36 crc kubenswrapper[5070]: I1213 03:54:36.057026 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"libvirt-secret" Dec 13 03:54:36 crc kubenswrapper[5070]: I1213 03:54:36.061170 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-ls75m"] Dec 13 03:54:36 crc kubenswrapper[5070]: I1213 03:54:36.083631 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/244cbfa6-dea4-4fae-b3f5-582f53c21551-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-ls75m\" (UID: \"244cbfa6-dea4-4fae-b3f5-582f53c21551\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-ls75m" Dec 13 03:54:36 crc kubenswrapper[5070]: I1213 03:54:36.083712 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/244cbfa6-dea4-4fae-b3f5-582f53c21551-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-ls75m\" (UID: \"244cbfa6-dea4-4fae-b3f5-582f53c21551\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-ls75m" Dec 13 03:54:36 crc kubenswrapper[5070]: I1213 03:54:36.083775 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: 
\"kubernetes.io/secret/244cbfa6-dea4-4fae-b3f5-582f53c21551-ceph\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-ls75m\" (UID: \"244cbfa6-dea4-4fae-b3f5-582f53c21551\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-ls75m" Dec 13 03:54:36 crc kubenswrapper[5070]: I1213 03:54:36.083805 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/244cbfa6-dea4-4fae-b3f5-582f53c21551-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-ls75m\" (UID: \"244cbfa6-dea4-4fae-b3f5-582f53c21551\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-ls75m" Dec 13 03:54:36 crc kubenswrapper[5070]: I1213 03:54:36.083984 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c2n6n\" (UniqueName: \"kubernetes.io/projected/244cbfa6-dea4-4fae-b3f5-582f53c21551-kube-api-access-c2n6n\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-ls75m\" (UID: \"244cbfa6-dea4-4fae-b3f5-582f53c21551\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-ls75m" Dec 13 03:54:36 crc kubenswrapper[5070]: I1213 03:54:36.084053 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/244cbfa6-dea4-4fae-b3f5-582f53c21551-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-ls75m\" (UID: \"244cbfa6-dea4-4fae-b3f5-582f53c21551\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-ls75m" Dec 13 03:54:36 crc kubenswrapper[5070]: I1213 03:54:36.185930 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/244cbfa6-dea4-4fae-b3f5-582f53c21551-ceph\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-ls75m\" (UID: \"244cbfa6-dea4-4fae-b3f5-582f53c21551\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-ls75m" Dec 13 03:54:36 crc kubenswrapper[5070]: I1213 03:54:36.185989 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/244cbfa6-dea4-4fae-b3f5-582f53c21551-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-ls75m\" (UID: \"244cbfa6-dea4-4fae-b3f5-582f53c21551\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-ls75m" Dec 13 03:54:36 crc kubenswrapper[5070]: I1213 03:54:36.186035 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c2n6n\" (UniqueName: \"kubernetes.io/projected/244cbfa6-dea4-4fae-b3f5-582f53c21551-kube-api-access-c2n6n\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-ls75m\" (UID: \"244cbfa6-dea4-4fae-b3f5-582f53c21551\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-ls75m" Dec 13 03:54:36 crc kubenswrapper[5070]: I1213 03:54:36.186070 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/244cbfa6-dea4-4fae-b3f5-582f53c21551-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-ls75m\" (UID: \"244cbfa6-dea4-4fae-b3f5-582f53c21551\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-ls75m" Dec 13 03:54:36 crc kubenswrapper[5070]: I1213 03:54:36.186185 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/244cbfa6-dea4-4fae-b3f5-582f53c21551-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-ls75m\" (UID: \"244cbfa6-dea4-4fae-b3f5-582f53c21551\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-ls75m" Dec 13 03:54:36 crc kubenswrapper[5070]: I1213 03:54:36.186250 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/244cbfa6-dea4-4fae-b3f5-582f53c21551-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-ls75m\" (UID: \"244cbfa6-dea4-4fae-b3f5-582f53c21551\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-ls75m" Dec 13 03:54:36 crc kubenswrapper[5070]: I1213 03:54:36.192264 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/244cbfa6-dea4-4fae-b3f5-582f53c21551-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-ls75m\" (UID: \"244cbfa6-dea4-4fae-b3f5-582f53c21551\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-ls75m" Dec 13 03:54:36 crc kubenswrapper[5070]: I1213 03:54:36.192830 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/244cbfa6-dea4-4fae-b3f5-582f53c21551-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-ls75m\" (UID: \"244cbfa6-dea4-4fae-b3f5-582f53c21551\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-ls75m" Dec 13 03:54:36 crc kubenswrapper[5070]: I1213 03:54:36.193622 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/244cbfa6-dea4-4fae-b3f5-582f53c21551-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-ls75m\" (UID: \"244cbfa6-dea4-4fae-b3f5-582f53c21551\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-ls75m" Dec 13 03:54:36 crc kubenswrapper[5070]: I1213 03:54:36.204111 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/244cbfa6-dea4-4fae-b3f5-582f53c21551-ceph\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-ls75m\" (UID: \"244cbfa6-dea4-4fae-b3f5-582f53c21551\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-ls75m" Dec 13 03:54:36 crc kubenswrapper[5070]: I1213 03:54:36.204127 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/244cbfa6-dea4-4fae-b3f5-582f53c21551-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-ls75m\" (UID: \"244cbfa6-dea4-4fae-b3f5-582f53c21551\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-ls75m" Dec 13 03:54:36 crc kubenswrapper[5070]: I1213 03:54:36.210474 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c2n6n\" (UniqueName: \"kubernetes.io/projected/244cbfa6-dea4-4fae-b3f5-582f53c21551-kube-api-access-c2n6n\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-ls75m\" (UID: \"244cbfa6-dea4-4fae-b3f5-582f53c21551\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-ls75m" Dec 13 03:54:36 crc kubenswrapper[5070]: I1213 03:54:36.371425 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-ls75m" Dec 13 03:54:36 crc kubenswrapper[5070]: I1213 03:54:36.513590 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-2wwlv" Dec 13 03:54:36 crc kubenswrapper[5070]: I1213 03:54:36.562436 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-2wwlv"] Dec 13 03:54:36 crc kubenswrapper[5070]: I1213 03:54:36.908884 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-ls75m"] Dec 13 03:54:36 crc kubenswrapper[5070]: I1213 03:54:36.961723 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-ls75m" event={"ID":"244cbfa6-dea4-4fae-b3f5-582f53c21551","Type":"ContainerStarted","Data":"eae61252474c8556e587d8dfcf849b1aa291b684d042deb11e91d7635a7990ce"} Dec 13 03:54:36 crc kubenswrapper[5070]: I1213 03:54:36.961881 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-2wwlv" podUID="d6ebbd63-9cd1-4de0-a71e-5f5b80be68b1" containerName="registry-server" containerID="cri-o://aa26b5fad510701aa4eb81aa2b2fb712713367b52e8235138bb317dcb881c8f7" gracePeriod=2 Dec 13 03:54:37 crc kubenswrapper[5070]: I1213 03:54:37.388692 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-2wwlv" Dec 13 03:54:37 crc kubenswrapper[5070]: I1213 03:54:37.409413 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d6ebbd63-9cd1-4de0-a71e-5f5b80be68b1-catalog-content\") pod \"d6ebbd63-9cd1-4de0-a71e-5f5b80be68b1\" (UID: \"d6ebbd63-9cd1-4de0-a71e-5f5b80be68b1\") " Dec 13 03:54:37 crc kubenswrapper[5070]: I1213 03:54:37.409573 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d6ebbd63-9cd1-4de0-a71e-5f5b80be68b1-utilities\") pod \"d6ebbd63-9cd1-4de0-a71e-5f5b80be68b1\" (UID: \"d6ebbd63-9cd1-4de0-a71e-5f5b80be68b1\") " Dec 13 03:54:37 crc kubenswrapper[5070]: I1213 03:54:37.409610 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ndh9b\" (UniqueName: \"kubernetes.io/projected/d6ebbd63-9cd1-4de0-a71e-5f5b80be68b1-kube-api-access-ndh9b\") pod \"d6ebbd63-9cd1-4de0-a71e-5f5b80be68b1\" (UID: \"d6ebbd63-9cd1-4de0-a71e-5f5b80be68b1\") " Dec 13 03:54:37 crc kubenswrapper[5070]: I1213 03:54:37.411326 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d6ebbd63-9cd1-4de0-a71e-5f5b80be68b1-utilities" (OuterVolumeSpecName: "utilities") pod "d6ebbd63-9cd1-4de0-a71e-5f5b80be68b1" (UID: "d6ebbd63-9cd1-4de0-a71e-5f5b80be68b1"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 03:54:37 crc kubenswrapper[5070]: I1213 03:54:37.415842 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d6ebbd63-9cd1-4de0-a71e-5f5b80be68b1-kube-api-access-ndh9b" (OuterVolumeSpecName: "kube-api-access-ndh9b") pod "d6ebbd63-9cd1-4de0-a71e-5f5b80be68b1" (UID: "d6ebbd63-9cd1-4de0-a71e-5f5b80be68b1"). InnerVolumeSpecName "kube-api-access-ndh9b". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:54:37 crc kubenswrapper[5070]: I1213 03:54:37.472804 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d6ebbd63-9cd1-4de0-a71e-5f5b80be68b1-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d6ebbd63-9cd1-4de0-a71e-5f5b80be68b1" (UID: "d6ebbd63-9cd1-4de0-a71e-5f5b80be68b1"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 03:54:37 crc kubenswrapper[5070]: I1213 03:54:37.511241 5070 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d6ebbd63-9cd1-4de0-a71e-5f5b80be68b1-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 13 03:54:37 crc kubenswrapper[5070]: I1213 03:54:37.511431 5070 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d6ebbd63-9cd1-4de0-a71e-5f5b80be68b1-utilities\") on node \"crc\" DevicePath \"\"" Dec 13 03:54:37 crc kubenswrapper[5070]: I1213 03:54:37.511532 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ndh9b\" (UniqueName: \"kubernetes.io/projected/d6ebbd63-9cd1-4de0-a71e-5f5b80be68b1-kube-api-access-ndh9b\") on node \"crc\" DevicePath \"\"" Dec 13 03:54:37 crc kubenswrapper[5070]: I1213 03:54:37.973312 5070 generic.go:334] "Generic (PLEG): container finished" podID="d6ebbd63-9cd1-4de0-a71e-5f5b80be68b1" containerID="aa26b5fad510701aa4eb81aa2b2fb712713367b52e8235138bb317dcb881c8f7" exitCode=0 Dec 13 03:54:37 crc kubenswrapper[5070]: I1213 03:54:37.973402 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2wwlv" event={"ID":"d6ebbd63-9cd1-4de0-a71e-5f5b80be68b1","Type":"ContainerDied","Data":"aa26b5fad510701aa4eb81aa2b2fb712713367b52e8235138bb317dcb881c8f7"} Dec 13 03:54:37 crc kubenswrapper[5070]: I1213 03:54:37.973708 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2wwlv" event={"ID":"d6ebbd63-9cd1-4de0-a71e-5f5b80be68b1","Type":"ContainerDied","Data":"656317df0a70b0c177f1603c0e629a9fdd205e56ee8193738313bc3b978f3ea8"} Dec 13 03:54:37 crc kubenswrapper[5070]: I1213 03:54:37.973727 5070 scope.go:117] "RemoveContainer" containerID="aa26b5fad510701aa4eb81aa2b2fb712713367b52e8235138bb317dcb881c8f7" Dec 13 03:54:37 crc kubenswrapper[5070]: I1213 03:54:37.973431 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-2wwlv" Dec 13 03:54:37 crc kubenswrapper[5070]: I1213 03:54:37.979099 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-ls75m" event={"ID":"244cbfa6-dea4-4fae-b3f5-582f53c21551","Type":"ContainerStarted","Data":"020d17efa80118ee49f5395b448aaf01789598f7f8117c02c2f4a274cbc8c2a5"} Dec 13 03:54:38 crc kubenswrapper[5070]: I1213 03:54:38.001571 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-ls75m" podStartSLOduration=1.453680471 podStartE2EDuration="2.00155209s" podCreationTimestamp="2025-12-13 03:54:36 +0000 UTC" firstStartedPulling="2025-12-13 03:54:36.918008006 +0000 UTC m=+2569.153851552" lastFinishedPulling="2025-12-13 03:54:37.465879625 +0000 UTC m=+2569.701723171" observedRunningTime="2025-12-13 03:54:37.996232026 +0000 UTC m=+2570.232075572" watchObservedRunningTime="2025-12-13 03:54:38.00155209 +0000 UTC m=+2570.237395636" Dec 13 03:54:38 crc kubenswrapper[5070]: I1213 03:54:38.005531 5070 scope.go:117] "RemoveContainer" containerID="d90d417f863aadaed08307a84a7d86522782acabb8446abb4522921034264b0d" Dec 13 03:54:38 crc kubenswrapper[5070]: I1213 03:54:38.027733 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-2wwlv"] Dec 13 03:54:38 crc kubenswrapper[5070]: I1213 03:54:38.035884 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-2wwlv"] Dec 13 03:54:38 crc kubenswrapper[5070]: I1213 03:54:38.037884 5070 scope.go:117] "RemoveContainer" containerID="d102814b9c8b859e78464912500c1f22df98892a36b5c6adb9ea9e0015d9d0e3" Dec 13 03:54:38 crc kubenswrapper[5070]: I1213 03:54:38.059062 5070 scope.go:117] "RemoveContainer" containerID="aa26b5fad510701aa4eb81aa2b2fb712713367b52e8235138bb317dcb881c8f7" Dec 13 03:54:38 crc kubenswrapper[5070]: E1213 03:54:38.059593 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aa26b5fad510701aa4eb81aa2b2fb712713367b52e8235138bb317dcb881c8f7\": container with ID starting with aa26b5fad510701aa4eb81aa2b2fb712713367b52e8235138bb317dcb881c8f7 not found: ID does not exist" containerID="aa26b5fad510701aa4eb81aa2b2fb712713367b52e8235138bb317dcb881c8f7" Dec 13 03:54:38 crc kubenswrapper[5070]: I1213 03:54:38.059634 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aa26b5fad510701aa4eb81aa2b2fb712713367b52e8235138bb317dcb881c8f7"} err="failed to get container status \"aa26b5fad510701aa4eb81aa2b2fb712713367b52e8235138bb317dcb881c8f7\": rpc error: code = NotFound desc = could not find container \"aa26b5fad510701aa4eb81aa2b2fb712713367b52e8235138bb317dcb881c8f7\": container with ID starting with aa26b5fad510701aa4eb81aa2b2fb712713367b52e8235138bb317dcb881c8f7 not found: ID does not exist" Dec 13 03:54:38 crc kubenswrapper[5070]: I1213 03:54:38.059661 5070 scope.go:117] "RemoveContainer" containerID="d90d417f863aadaed08307a84a7d86522782acabb8446abb4522921034264b0d" Dec 13 03:54:38 crc kubenswrapper[5070]: E1213 03:54:38.060288 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d90d417f863aadaed08307a84a7d86522782acabb8446abb4522921034264b0d\": container with ID starting with d90d417f863aadaed08307a84a7d86522782acabb8446abb4522921034264b0d not found: ID does 
not exist" containerID="d90d417f863aadaed08307a84a7d86522782acabb8446abb4522921034264b0d" Dec 13 03:54:38 crc kubenswrapper[5070]: I1213 03:54:38.060311 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d90d417f863aadaed08307a84a7d86522782acabb8446abb4522921034264b0d"} err="failed to get container status \"d90d417f863aadaed08307a84a7d86522782acabb8446abb4522921034264b0d\": rpc error: code = NotFound desc = could not find container \"d90d417f863aadaed08307a84a7d86522782acabb8446abb4522921034264b0d\": container with ID starting with d90d417f863aadaed08307a84a7d86522782acabb8446abb4522921034264b0d not found: ID does not exist" Dec 13 03:54:38 crc kubenswrapper[5070]: I1213 03:54:38.060327 5070 scope.go:117] "RemoveContainer" containerID="d102814b9c8b859e78464912500c1f22df98892a36b5c6adb9ea9e0015d9d0e3" Dec 13 03:54:38 crc kubenswrapper[5070]: E1213 03:54:38.060741 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d102814b9c8b859e78464912500c1f22df98892a36b5c6adb9ea9e0015d9d0e3\": container with ID starting with d102814b9c8b859e78464912500c1f22df98892a36b5c6adb9ea9e0015d9d0e3 not found: ID does not exist" containerID="d102814b9c8b859e78464912500c1f22df98892a36b5c6adb9ea9e0015d9d0e3" Dec 13 03:54:38 crc kubenswrapper[5070]: I1213 03:54:38.060801 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d102814b9c8b859e78464912500c1f22df98892a36b5c6adb9ea9e0015d9d0e3"} err="failed to get container status \"d102814b9c8b859e78464912500c1f22df98892a36b5c6adb9ea9e0015d9d0e3\": rpc error: code = NotFound desc = could not find container \"d102814b9c8b859e78464912500c1f22df98892a36b5c6adb9ea9e0015d9d0e3\": container with ID starting with d102814b9c8b859e78464912500c1f22df98892a36b5c6adb9ea9e0015d9d0e3 not found: ID does not exist" Dec 13 03:54:38 crc kubenswrapper[5070]: I1213 03:54:38.178396 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d6ebbd63-9cd1-4de0-a71e-5f5b80be68b1" path="/var/lib/kubelet/pods/d6ebbd63-9cd1-4de0-a71e-5f5b80be68b1/volumes" Dec 13 03:54:38 crc kubenswrapper[5070]: I1213 03:54:38.756092 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-zfksw"] Dec 13 03:54:38 crc kubenswrapper[5070]: E1213 03:54:38.756486 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d6ebbd63-9cd1-4de0-a71e-5f5b80be68b1" containerName="extract-content" Dec 13 03:54:38 crc kubenswrapper[5070]: I1213 03:54:38.756498 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="d6ebbd63-9cd1-4de0-a71e-5f5b80be68b1" containerName="extract-content" Dec 13 03:54:38 crc kubenswrapper[5070]: E1213 03:54:38.756516 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d6ebbd63-9cd1-4de0-a71e-5f5b80be68b1" containerName="extract-utilities" Dec 13 03:54:38 crc kubenswrapper[5070]: I1213 03:54:38.756522 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="d6ebbd63-9cd1-4de0-a71e-5f5b80be68b1" containerName="extract-utilities" Dec 13 03:54:38 crc kubenswrapper[5070]: E1213 03:54:38.756550 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d6ebbd63-9cd1-4de0-a71e-5f5b80be68b1" containerName="registry-server" Dec 13 03:54:38 crc kubenswrapper[5070]: I1213 03:54:38.756557 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="d6ebbd63-9cd1-4de0-a71e-5f5b80be68b1" containerName="registry-server" Dec 13 03:54:38 crc 
kubenswrapper[5070]: I1213 03:54:38.756707 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="d6ebbd63-9cd1-4de0-a71e-5f5b80be68b1" containerName="registry-server" Dec 13 03:54:38 crc kubenswrapper[5070]: I1213 03:54:38.757990 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-zfksw" Dec 13 03:54:38 crc kubenswrapper[5070]: I1213 03:54:38.768230 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-zfksw"] Dec 13 03:54:38 crc kubenswrapper[5070]: I1213 03:54:38.831460 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tpsbp\" (UniqueName: \"kubernetes.io/projected/983f9b9e-bdff-450d-91bb-ea5a9828472a-kube-api-access-tpsbp\") pod \"redhat-marketplace-zfksw\" (UID: \"983f9b9e-bdff-450d-91bb-ea5a9828472a\") " pod="openshift-marketplace/redhat-marketplace-zfksw" Dec 13 03:54:38 crc kubenswrapper[5070]: I1213 03:54:38.831537 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/983f9b9e-bdff-450d-91bb-ea5a9828472a-utilities\") pod \"redhat-marketplace-zfksw\" (UID: \"983f9b9e-bdff-450d-91bb-ea5a9828472a\") " pod="openshift-marketplace/redhat-marketplace-zfksw" Dec 13 03:54:38 crc kubenswrapper[5070]: I1213 03:54:38.831663 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/983f9b9e-bdff-450d-91bb-ea5a9828472a-catalog-content\") pod \"redhat-marketplace-zfksw\" (UID: \"983f9b9e-bdff-450d-91bb-ea5a9828472a\") " pod="openshift-marketplace/redhat-marketplace-zfksw" Dec 13 03:54:38 crc kubenswrapper[5070]: I1213 03:54:38.932995 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/983f9b9e-bdff-450d-91bb-ea5a9828472a-catalog-content\") pod \"redhat-marketplace-zfksw\" (UID: \"983f9b9e-bdff-450d-91bb-ea5a9828472a\") " pod="openshift-marketplace/redhat-marketplace-zfksw" Dec 13 03:54:38 crc kubenswrapper[5070]: I1213 03:54:38.933119 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tpsbp\" (UniqueName: \"kubernetes.io/projected/983f9b9e-bdff-450d-91bb-ea5a9828472a-kube-api-access-tpsbp\") pod \"redhat-marketplace-zfksw\" (UID: \"983f9b9e-bdff-450d-91bb-ea5a9828472a\") " pod="openshift-marketplace/redhat-marketplace-zfksw" Dec 13 03:54:38 crc kubenswrapper[5070]: I1213 03:54:38.933155 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/983f9b9e-bdff-450d-91bb-ea5a9828472a-utilities\") pod \"redhat-marketplace-zfksw\" (UID: \"983f9b9e-bdff-450d-91bb-ea5a9828472a\") " pod="openshift-marketplace/redhat-marketplace-zfksw" Dec 13 03:54:38 crc kubenswrapper[5070]: I1213 03:54:38.933638 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/983f9b9e-bdff-450d-91bb-ea5a9828472a-catalog-content\") pod \"redhat-marketplace-zfksw\" (UID: \"983f9b9e-bdff-450d-91bb-ea5a9828472a\") " pod="openshift-marketplace/redhat-marketplace-zfksw" Dec 13 03:54:38 crc kubenswrapper[5070]: I1213 03:54:38.933687 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/983f9b9e-bdff-450d-91bb-ea5a9828472a-utilities\") pod \"redhat-marketplace-zfksw\" (UID: \"983f9b9e-bdff-450d-91bb-ea5a9828472a\") " pod="openshift-marketplace/redhat-marketplace-zfksw" Dec 13 03:54:38 crc kubenswrapper[5070]: I1213 03:54:38.955385 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tpsbp\" (UniqueName: \"kubernetes.io/projected/983f9b9e-bdff-450d-91bb-ea5a9828472a-kube-api-access-tpsbp\") pod \"redhat-marketplace-zfksw\" (UID: \"983f9b9e-bdff-450d-91bb-ea5a9828472a\") " pod="openshift-marketplace/redhat-marketplace-zfksw" Dec 13 03:54:39 crc kubenswrapper[5070]: I1213 03:54:39.087177 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-zfksw" Dec 13 03:54:39 crc kubenswrapper[5070]: I1213 03:54:39.566239 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-zfksw"] Dec 13 03:54:39 crc kubenswrapper[5070]: W1213 03:54:39.573133 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod983f9b9e_bdff_450d_91bb_ea5a9828472a.slice/crio-19cddf3db71e15ed9a84b52e30b92cceb7f2ec1912e31d7254c8e8c647a04826 WatchSource:0}: Error finding container 19cddf3db71e15ed9a84b52e30b92cceb7f2ec1912e31d7254c8e8c647a04826: Status 404 returned error can't find the container with id 19cddf3db71e15ed9a84b52e30b92cceb7f2ec1912e31d7254c8e8c647a04826 Dec 13 03:54:39 crc kubenswrapper[5070]: I1213 03:54:39.998298 5070 generic.go:334] "Generic (PLEG): container finished" podID="983f9b9e-bdff-450d-91bb-ea5a9828472a" containerID="5eee21067d106813d3953684a203f23494824bde214770e90cb24b756e385d23" exitCode=0 Dec 13 03:54:39 crc kubenswrapper[5070]: I1213 03:54:39.998388 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zfksw" event={"ID":"983f9b9e-bdff-450d-91bb-ea5a9828472a","Type":"ContainerDied","Data":"5eee21067d106813d3953684a203f23494824bde214770e90cb24b756e385d23"} Dec 13 03:54:39 crc kubenswrapper[5070]: I1213 03:54:39.998631 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zfksw" event={"ID":"983f9b9e-bdff-450d-91bb-ea5a9828472a","Type":"ContainerStarted","Data":"19cddf3db71e15ed9a84b52e30b92cceb7f2ec1912e31d7254c8e8c647a04826"} Dec 13 03:54:42 crc kubenswrapper[5070]: I1213 03:54:42.016303 5070 generic.go:334] "Generic (PLEG): container finished" podID="983f9b9e-bdff-450d-91bb-ea5a9828472a" containerID="d3a6f2d0a947fafe69d71001e28e505d9da420f391381d6bc8670105ac0b9561" exitCode=0 Dec 13 03:54:42 crc kubenswrapper[5070]: I1213 03:54:42.016359 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zfksw" event={"ID":"983f9b9e-bdff-450d-91bb-ea5a9828472a","Type":"ContainerDied","Data":"d3a6f2d0a947fafe69d71001e28e505d9da420f391381d6bc8670105ac0b9561"} Dec 13 03:54:45 crc kubenswrapper[5070]: I1213 03:54:45.047893 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zfksw" event={"ID":"983f9b9e-bdff-450d-91bb-ea5a9828472a","Type":"ContainerStarted","Data":"84dd08ce7c4963b422cceb45da7544666b6cc11c69eb21260626e45e79e482ee"} Dec 13 03:54:45 crc kubenswrapper[5070]: I1213 03:54:45.071339 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-zfksw" podStartSLOduration=3.254286435 
podStartE2EDuration="7.071318617s" podCreationTimestamp="2025-12-13 03:54:38 +0000 UTC" firstStartedPulling="2025-12-13 03:54:40.000010728 +0000 UTC m=+2572.235854274" lastFinishedPulling="2025-12-13 03:54:43.81704291 +0000 UTC m=+2576.052886456" observedRunningTime="2025-12-13 03:54:45.064637595 +0000 UTC m=+2577.300481151" watchObservedRunningTime="2025-12-13 03:54:45.071318617 +0000 UTC m=+2577.307162163" Dec 13 03:54:46 crc kubenswrapper[5070]: I1213 03:54:46.167595 5070 scope.go:117] "RemoveContainer" containerID="56c86f8149d0aae997cc460a6956ca1f0ca99356e791ef266d0035921c2b2a17" Dec 13 03:54:46 crc kubenswrapper[5070]: E1213 03:54:46.168903 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 03:54:49 crc kubenswrapper[5070]: I1213 03:54:49.088319 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-zfksw" Dec 13 03:54:49 crc kubenswrapper[5070]: I1213 03:54:49.088701 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-zfksw" Dec 13 03:54:49 crc kubenswrapper[5070]: I1213 03:54:49.134956 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-zfksw" Dec 13 03:54:50 crc kubenswrapper[5070]: I1213 03:54:50.131825 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-zfksw" Dec 13 03:54:50 crc kubenswrapper[5070]: I1213 03:54:50.181893 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-zfksw"] Dec 13 03:54:52 crc kubenswrapper[5070]: I1213 03:54:52.103155 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-zfksw" podUID="983f9b9e-bdff-450d-91bb-ea5a9828472a" containerName="registry-server" containerID="cri-o://84dd08ce7c4963b422cceb45da7544666b6cc11c69eb21260626e45e79e482ee" gracePeriod=2 Dec 13 03:54:52 crc kubenswrapper[5070]: I1213 03:54:52.541262 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-zfksw" Dec 13 03:54:52 crc kubenswrapper[5070]: I1213 03:54:52.616319 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/983f9b9e-bdff-450d-91bb-ea5a9828472a-catalog-content\") pod \"983f9b9e-bdff-450d-91bb-ea5a9828472a\" (UID: \"983f9b9e-bdff-450d-91bb-ea5a9828472a\") " Dec 13 03:54:52 crc kubenswrapper[5070]: I1213 03:54:52.616511 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tpsbp\" (UniqueName: \"kubernetes.io/projected/983f9b9e-bdff-450d-91bb-ea5a9828472a-kube-api-access-tpsbp\") pod \"983f9b9e-bdff-450d-91bb-ea5a9828472a\" (UID: \"983f9b9e-bdff-450d-91bb-ea5a9828472a\") " Dec 13 03:54:52 crc kubenswrapper[5070]: I1213 03:54:52.616594 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/983f9b9e-bdff-450d-91bb-ea5a9828472a-utilities\") pod \"983f9b9e-bdff-450d-91bb-ea5a9828472a\" (UID: \"983f9b9e-bdff-450d-91bb-ea5a9828472a\") " Dec 13 03:54:52 crc kubenswrapper[5070]: I1213 03:54:52.617620 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/983f9b9e-bdff-450d-91bb-ea5a9828472a-utilities" (OuterVolumeSpecName: "utilities") pod "983f9b9e-bdff-450d-91bb-ea5a9828472a" (UID: "983f9b9e-bdff-450d-91bb-ea5a9828472a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 03:54:52 crc kubenswrapper[5070]: I1213 03:54:52.623923 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/983f9b9e-bdff-450d-91bb-ea5a9828472a-kube-api-access-tpsbp" (OuterVolumeSpecName: "kube-api-access-tpsbp") pod "983f9b9e-bdff-450d-91bb-ea5a9828472a" (UID: "983f9b9e-bdff-450d-91bb-ea5a9828472a"). InnerVolumeSpecName "kube-api-access-tpsbp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:54:52 crc kubenswrapper[5070]: I1213 03:54:52.639815 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/983f9b9e-bdff-450d-91bb-ea5a9828472a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "983f9b9e-bdff-450d-91bb-ea5a9828472a" (UID: "983f9b9e-bdff-450d-91bb-ea5a9828472a"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 03:54:52 crc kubenswrapper[5070]: I1213 03:54:52.718342 5070 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/983f9b9e-bdff-450d-91bb-ea5a9828472a-utilities\") on node \"crc\" DevicePath \"\"" Dec 13 03:54:52 crc kubenswrapper[5070]: I1213 03:54:52.718387 5070 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/983f9b9e-bdff-450d-91bb-ea5a9828472a-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 13 03:54:52 crc kubenswrapper[5070]: I1213 03:54:52.718402 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tpsbp\" (UniqueName: \"kubernetes.io/projected/983f9b9e-bdff-450d-91bb-ea5a9828472a-kube-api-access-tpsbp\") on node \"crc\" DevicePath \"\"" Dec 13 03:54:53 crc kubenswrapper[5070]: I1213 03:54:53.113737 5070 generic.go:334] "Generic (PLEG): container finished" podID="983f9b9e-bdff-450d-91bb-ea5a9828472a" containerID="84dd08ce7c4963b422cceb45da7544666b6cc11c69eb21260626e45e79e482ee" exitCode=0 Dec 13 03:54:53 crc kubenswrapper[5070]: I1213 03:54:53.113817 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-zfksw" Dec 13 03:54:53 crc kubenswrapper[5070]: I1213 03:54:53.113801 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zfksw" event={"ID":"983f9b9e-bdff-450d-91bb-ea5a9828472a","Type":"ContainerDied","Data":"84dd08ce7c4963b422cceb45da7544666b6cc11c69eb21260626e45e79e482ee"} Dec 13 03:54:53 crc kubenswrapper[5070]: I1213 03:54:53.114214 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zfksw" event={"ID":"983f9b9e-bdff-450d-91bb-ea5a9828472a","Type":"ContainerDied","Data":"19cddf3db71e15ed9a84b52e30b92cceb7f2ec1912e31d7254c8e8c647a04826"} Dec 13 03:54:53 crc kubenswrapper[5070]: I1213 03:54:53.114240 5070 scope.go:117] "RemoveContainer" containerID="84dd08ce7c4963b422cceb45da7544666b6cc11c69eb21260626e45e79e482ee" Dec 13 03:54:53 crc kubenswrapper[5070]: I1213 03:54:53.135884 5070 scope.go:117] "RemoveContainer" containerID="d3a6f2d0a947fafe69d71001e28e505d9da420f391381d6bc8670105ac0b9561" Dec 13 03:54:53 crc kubenswrapper[5070]: I1213 03:54:53.152255 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-zfksw"] Dec 13 03:54:53 crc kubenswrapper[5070]: I1213 03:54:53.160163 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-zfksw"] Dec 13 03:54:53 crc kubenswrapper[5070]: I1213 03:54:53.160893 5070 scope.go:117] "RemoveContainer" containerID="5eee21067d106813d3953684a203f23494824bde214770e90cb24b756e385d23" Dec 13 03:54:53 crc kubenswrapper[5070]: I1213 03:54:53.219685 5070 scope.go:117] "RemoveContainer" containerID="84dd08ce7c4963b422cceb45da7544666b6cc11c69eb21260626e45e79e482ee" Dec 13 03:54:53 crc kubenswrapper[5070]: E1213 03:54:53.220202 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"84dd08ce7c4963b422cceb45da7544666b6cc11c69eb21260626e45e79e482ee\": container with ID starting with 84dd08ce7c4963b422cceb45da7544666b6cc11c69eb21260626e45e79e482ee not found: ID does not exist" containerID="84dd08ce7c4963b422cceb45da7544666b6cc11c69eb21260626e45e79e482ee" Dec 13 03:54:53 crc kubenswrapper[5070]: I1213 03:54:53.220247 5070 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"84dd08ce7c4963b422cceb45da7544666b6cc11c69eb21260626e45e79e482ee"} err="failed to get container status \"84dd08ce7c4963b422cceb45da7544666b6cc11c69eb21260626e45e79e482ee\": rpc error: code = NotFound desc = could not find container \"84dd08ce7c4963b422cceb45da7544666b6cc11c69eb21260626e45e79e482ee\": container with ID starting with 84dd08ce7c4963b422cceb45da7544666b6cc11c69eb21260626e45e79e482ee not found: ID does not exist" Dec 13 03:54:53 crc kubenswrapper[5070]: I1213 03:54:53.220274 5070 scope.go:117] "RemoveContainer" containerID="d3a6f2d0a947fafe69d71001e28e505d9da420f391381d6bc8670105ac0b9561" Dec 13 03:54:53 crc kubenswrapper[5070]: E1213 03:54:53.220630 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d3a6f2d0a947fafe69d71001e28e505d9da420f391381d6bc8670105ac0b9561\": container with ID starting with d3a6f2d0a947fafe69d71001e28e505d9da420f391381d6bc8670105ac0b9561 not found: ID does not exist" containerID="d3a6f2d0a947fafe69d71001e28e505d9da420f391381d6bc8670105ac0b9561" Dec 13 03:54:53 crc kubenswrapper[5070]: I1213 03:54:53.220718 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d3a6f2d0a947fafe69d71001e28e505d9da420f391381d6bc8670105ac0b9561"} err="failed to get container status \"d3a6f2d0a947fafe69d71001e28e505d9da420f391381d6bc8670105ac0b9561\": rpc error: code = NotFound desc = could not find container \"d3a6f2d0a947fafe69d71001e28e505d9da420f391381d6bc8670105ac0b9561\": container with ID starting with d3a6f2d0a947fafe69d71001e28e505d9da420f391381d6bc8670105ac0b9561 not found: ID does not exist" Dec 13 03:54:53 crc kubenswrapper[5070]: I1213 03:54:53.220754 5070 scope.go:117] "RemoveContainer" containerID="5eee21067d106813d3953684a203f23494824bde214770e90cb24b756e385d23" Dec 13 03:54:53 crc kubenswrapper[5070]: E1213 03:54:53.221501 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5eee21067d106813d3953684a203f23494824bde214770e90cb24b756e385d23\": container with ID starting with 5eee21067d106813d3953684a203f23494824bde214770e90cb24b756e385d23 not found: ID does not exist" containerID="5eee21067d106813d3953684a203f23494824bde214770e90cb24b756e385d23" Dec 13 03:54:53 crc kubenswrapper[5070]: I1213 03:54:53.221535 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5eee21067d106813d3953684a203f23494824bde214770e90cb24b756e385d23"} err="failed to get container status \"5eee21067d106813d3953684a203f23494824bde214770e90cb24b756e385d23\": rpc error: code = NotFound desc = could not find container \"5eee21067d106813d3953684a203f23494824bde214770e90cb24b756e385d23\": container with ID starting with 5eee21067d106813d3953684a203f23494824bde214770e90cb24b756e385d23 not found: ID does not exist" Dec 13 03:54:54 crc kubenswrapper[5070]: I1213 03:54:54.176720 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="983f9b9e-bdff-450d-91bb-ea5a9828472a" path="/var/lib/kubelet/pods/983f9b9e-bdff-450d-91bb-ea5a9828472a/volumes" Dec 13 03:54:57 crc kubenswrapper[5070]: I1213 03:54:57.167728 5070 scope.go:117] "RemoveContainer" containerID="56c86f8149d0aae997cc460a6956ca1f0ca99356e791ef266d0035921c2b2a17" Dec 13 03:54:57 crc kubenswrapper[5070]: E1213 03:54:57.169548 5070 pod_workers.go:1301] "Error syncing pod, skipping" 
err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 03:55:11 crc kubenswrapper[5070]: I1213 03:55:11.168078 5070 scope.go:117] "RemoveContainer" containerID="56c86f8149d0aae997cc460a6956ca1f0ca99356e791ef266d0035921c2b2a17" Dec 13 03:55:11 crc kubenswrapper[5070]: E1213 03:55:11.169075 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 03:55:26 crc kubenswrapper[5070]: I1213 03:55:26.167289 5070 scope.go:117] "RemoveContainer" containerID="56c86f8149d0aae997cc460a6956ca1f0ca99356e791ef266d0035921c2b2a17" Dec 13 03:55:26 crc kubenswrapper[5070]: E1213 03:55:26.168417 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 03:55:40 crc kubenswrapper[5070]: I1213 03:55:40.166981 5070 scope.go:117] "RemoveContainer" containerID="56c86f8149d0aae997cc460a6956ca1f0ca99356e791ef266d0035921c2b2a17" Dec 13 03:55:40 crc kubenswrapper[5070]: E1213 03:55:40.167815 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 03:55:55 crc kubenswrapper[5070]: I1213 03:55:55.167117 5070 scope.go:117] "RemoveContainer" containerID="56c86f8149d0aae997cc460a6956ca1f0ca99356e791ef266d0035921c2b2a17" Dec 13 03:55:55 crc kubenswrapper[5070]: E1213 03:55:55.167931 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 03:56:10 crc kubenswrapper[5070]: I1213 03:56:10.167547 5070 scope.go:117] "RemoveContainer" containerID="56c86f8149d0aae997cc460a6956ca1f0ca99356e791ef266d0035921c2b2a17" Dec 13 03:56:10 crc kubenswrapper[5070]: E1213 03:56:10.168331 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 03:56:24 crc kubenswrapper[5070]: I1213 03:56:24.167413 5070 scope.go:117] "RemoveContainer" containerID="56c86f8149d0aae997cc460a6956ca1f0ca99356e791ef266d0035921c2b2a17" Dec 13 03:56:24 crc kubenswrapper[5070]: E1213 03:56:24.168356 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 03:56:38 crc kubenswrapper[5070]: I1213 03:56:38.173196 5070 scope.go:117] "RemoveContainer" containerID="56c86f8149d0aae997cc460a6956ca1f0ca99356e791ef266d0035921c2b2a17" Dec 13 03:56:38 crc kubenswrapper[5070]: E1213 03:56:38.173983 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 03:56:53 crc kubenswrapper[5070]: I1213 03:56:53.167265 5070 scope.go:117] "RemoveContainer" containerID="56c86f8149d0aae997cc460a6956ca1f0ca99356e791ef266d0035921c2b2a17" Dec 13 03:56:53 crc kubenswrapper[5070]: E1213 03:56:53.167954 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 03:57:04 crc kubenswrapper[5070]: I1213 03:57:04.177740 5070 scope.go:117] "RemoveContainer" containerID="56c86f8149d0aae997cc460a6956ca1f0ca99356e791ef266d0035921c2b2a17" Dec 13 03:57:04 crc kubenswrapper[5070]: E1213 03:57:04.178919 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 03:57:19 crc kubenswrapper[5070]: I1213 03:57:19.167217 5070 scope.go:117] "RemoveContainer" containerID="56c86f8149d0aae997cc460a6956ca1f0ca99356e791ef266d0035921c2b2a17" Dec 13 03:57:19 crc kubenswrapper[5070]: E1213 03:57:19.167962 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" 
podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 03:57:33 crc kubenswrapper[5070]: I1213 03:57:33.167134 5070 scope.go:117] "RemoveContainer" containerID="56c86f8149d0aae997cc460a6956ca1f0ca99356e791ef266d0035921c2b2a17" Dec 13 03:57:33 crc kubenswrapper[5070]: I1213 03:57:33.467918 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" event={"ID":"a2e447c7-5901-414f-af96-69441d4750db","Type":"ContainerStarted","Data":"9987099f8e98d31d38862264f7e07bca0970527ae353fb5b7e9a8135e3aed8d3"} Dec 13 03:57:56 crc kubenswrapper[5070]: I1213 03:57:56.300873 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-fw62b"] Dec 13 03:57:56 crc kubenswrapper[5070]: E1213 03:57:56.301819 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="983f9b9e-bdff-450d-91bb-ea5a9828472a" containerName="extract-content" Dec 13 03:57:56 crc kubenswrapper[5070]: I1213 03:57:56.301839 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="983f9b9e-bdff-450d-91bb-ea5a9828472a" containerName="extract-content" Dec 13 03:57:56 crc kubenswrapper[5070]: E1213 03:57:56.301858 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="983f9b9e-bdff-450d-91bb-ea5a9828472a" containerName="extract-utilities" Dec 13 03:57:56 crc kubenswrapper[5070]: I1213 03:57:56.301866 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="983f9b9e-bdff-450d-91bb-ea5a9828472a" containerName="extract-utilities" Dec 13 03:57:56 crc kubenswrapper[5070]: E1213 03:57:56.301882 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="983f9b9e-bdff-450d-91bb-ea5a9828472a" containerName="registry-server" Dec 13 03:57:56 crc kubenswrapper[5070]: I1213 03:57:56.301891 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="983f9b9e-bdff-450d-91bb-ea5a9828472a" containerName="registry-server" Dec 13 03:57:56 crc kubenswrapper[5070]: I1213 03:57:56.302206 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="983f9b9e-bdff-450d-91bb-ea5a9828472a" containerName="registry-server" Dec 13 03:57:56 crc kubenswrapper[5070]: I1213 03:57:56.303813 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-fw62b" Dec 13 03:57:56 crc kubenswrapper[5070]: I1213 03:57:56.322028 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-fw62b"] Dec 13 03:57:56 crc kubenswrapper[5070]: I1213 03:57:56.366079 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dpk55\" (UniqueName: \"kubernetes.io/projected/c526a897-3e53-4ed2-8973-69f46dcda027-kube-api-access-dpk55\") pod \"redhat-operators-fw62b\" (UID: \"c526a897-3e53-4ed2-8973-69f46dcda027\") " pod="openshift-marketplace/redhat-operators-fw62b" Dec 13 03:57:56 crc kubenswrapper[5070]: I1213 03:57:56.366139 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c526a897-3e53-4ed2-8973-69f46dcda027-catalog-content\") pod \"redhat-operators-fw62b\" (UID: \"c526a897-3e53-4ed2-8973-69f46dcda027\") " pod="openshift-marketplace/redhat-operators-fw62b" Dec 13 03:57:56 crc kubenswrapper[5070]: I1213 03:57:56.366172 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c526a897-3e53-4ed2-8973-69f46dcda027-utilities\") pod \"redhat-operators-fw62b\" (UID: \"c526a897-3e53-4ed2-8973-69f46dcda027\") " pod="openshift-marketplace/redhat-operators-fw62b" Dec 13 03:57:56 crc kubenswrapper[5070]: I1213 03:57:56.467739 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dpk55\" (UniqueName: \"kubernetes.io/projected/c526a897-3e53-4ed2-8973-69f46dcda027-kube-api-access-dpk55\") pod \"redhat-operators-fw62b\" (UID: \"c526a897-3e53-4ed2-8973-69f46dcda027\") " pod="openshift-marketplace/redhat-operators-fw62b" Dec 13 03:57:56 crc kubenswrapper[5070]: I1213 03:57:56.468298 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c526a897-3e53-4ed2-8973-69f46dcda027-catalog-content\") pod \"redhat-operators-fw62b\" (UID: \"c526a897-3e53-4ed2-8973-69f46dcda027\") " pod="openshift-marketplace/redhat-operators-fw62b" Dec 13 03:57:56 crc kubenswrapper[5070]: I1213 03:57:56.468435 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c526a897-3e53-4ed2-8973-69f46dcda027-utilities\") pod \"redhat-operators-fw62b\" (UID: \"c526a897-3e53-4ed2-8973-69f46dcda027\") " pod="openshift-marketplace/redhat-operators-fw62b" Dec 13 03:57:56 crc kubenswrapper[5070]: I1213 03:57:56.468779 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c526a897-3e53-4ed2-8973-69f46dcda027-catalog-content\") pod \"redhat-operators-fw62b\" (UID: \"c526a897-3e53-4ed2-8973-69f46dcda027\") " pod="openshift-marketplace/redhat-operators-fw62b" Dec 13 03:57:56 crc kubenswrapper[5070]: I1213 03:57:56.469046 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c526a897-3e53-4ed2-8973-69f46dcda027-utilities\") pod \"redhat-operators-fw62b\" (UID: \"c526a897-3e53-4ed2-8973-69f46dcda027\") " pod="openshift-marketplace/redhat-operators-fw62b" Dec 13 03:57:56 crc kubenswrapper[5070]: I1213 03:57:56.497151 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-dpk55\" (UniqueName: \"kubernetes.io/projected/c526a897-3e53-4ed2-8973-69f46dcda027-kube-api-access-dpk55\") pod \"redhat-operators-fw62b\" (UID: \"c526a897-3e53-4ed2-8973-69f46dcda027\") " pod="openshift-marketplace/redhat-operators-fw62b" Dec 13 03:57:56 crc kubenswrapper[5070]: I1213 03:57:56.623419 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-fw62b" Dec 13 03:57:57 crc kubenswrapper[5070]: I1213 03:57:57.099373 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-fw62b"] Dec 13 03:57:57 crc kubenswrapper[5070]: I1213 03:57:57.681806 5070 generic.go:334] "Generic (PLEG): container finished" podID="c526a897-3e53-4ed2-8973-69f46dcda027" containerID="c5eb0eddb153d570e1c19d7ea2c0a0cb0f5967696dc6320d41b6dc07d9b373c1" exitCode=0 Dec 13 03:57:57 crc kubenswrapper[5070]: I1213 03:57:57.681860 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fw62b" event={"ID":"c526a897-3e53-4ed2-8973-69f46dcda027","Type":"ContainerDied","Data":"c5eb0eddb153d570e1c19d7ea2c0a0cb0f5967696dc6320d41b6dc07d9b373c1"} Dec 13 03:57:57 crc kubenswrapper[5070]: I1213 03:57:57.682536 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fw62b" event={"ID":"c526a897-3e53-4ed2-8973-69f46dcda027","Type":"ContainerStarted","Data":"86ca8780f9e0f8360d1429ab06f270b00756da636f8e99ba72ce00696f1c6c50"} Dec 13 03:57:58 crc kubenswrapper[5070]: I1213 03:57:58.702479 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fw62b" event={"ID":"c526a897-3e53-4ed2-8973-69f46dcda027","Type":"ContainerStarted","Data":"75826a500eaf886c39f66a533544c705032ca01bb93c97abc7cbc35897589627"} Dec 13 03:57:59 crc kubenswrapper[5070]: E1213 03:57:59.522583 5070 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc526a897_3e53_4ed2_8973_69f46dcda027.slice/crio-75826a500eaf886c39f66a533544c705032ca01bb93c97abc7cbc35897589627.scope\": RecentStats: unable to find data in memory cache]" Dec 13 03:58:01 crc kubenswrapper[5070]: I1213 03:58:01.727307 5070 generic.go:334] "Generic (PLEG): container finished" podID="c526a897-3e53-4ed2-8973-69f46dcda027" containerID="75826a500eaf886c39f66a533544c705032ca01bb93c97abc7cbc35897589627" exitCode=0 Dec 13 03:58:01 crc kubenswrapper[5070]: I1213 03:58:01.727394 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fw62b" event={"ID":"c526a897-3e53-4ed2-8973-69f46dcda027","Type":"ContainerDied","Data":"75826a500eaf886c39f66a533544c705032ca01bb93c97abc7cbc35897589627"} Dec 13 03:58:03 crc kubenswrapper[5070]: I1213 03:58:03.904296 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fw62b" event={"ID":"c526a897-3e53-4ed2-8973-69f46dcda027","Type":"ContainerStarted","Data":"f07bde96e98472b110f02341e21da914a7d8fd3d51503f3352a796dbb632799e"} Dec 13 03:58:03 crc kubenswrapper[5070]: I1213 03:58:03.925425 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-fw62b" podStartSLOduration=3.429802517 podStartE2EDuration="7.925409735s" podCreationTimestamp="2025-12-13 03:57:56 +0000 UTC" firstStartedPulling="2025-12-13 03:57:57.683385706 +0000 UTC m=+2769.919229252" 
lastFinishedPulling="2025-12-13 03:58:02.178992924 +0000 UTC m=+2774.414836470" observedRunningTime="2025-12-13 03:58:03.922388742 +0000 UTC m=+2776.158232288" watchObservedRunningTime="2025-12-13 03:58:03.925409735 +0000 UTC m=+2776.161253281" Dec 13 03:58:06 crc kubenswrapper[5070]: I1213 03:58:06.624242 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-fw62b" Dec 13 03:58:06 crc kubenswrapper[5070]: I1213 03:58:06.625789 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-fw62b" Dec 13 03:58:07 crc kubenswrapper[5070]: I1213 03:58:07.668926 5070 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-fw62b" podUID="c526a897-3e53-4ed2-8973-69f46dcda027" containerName="registry-server" probeResult="failure" output=< Dec 13 03:58:07 crc kubenswrapper[5070]: timeout: failed to connect service ":50051" within 1s Dec 13 03:58:07 crc kubenswrapper[5070]: > Dec 13 03:58:09 crc kubenswrapper[5070]: I1213 03:58:09.879889 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-z55zd"] Dec 13 03:58:09 crc kubenswrapper[5070]: I1213 03:58:09.884317 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-z55zd" Dec 13 03:58:09 crc kubenswrapper[5070]: I1213 03:58:09.891355 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-z55zd"] Dec 13 03:58:10 crc kubenswrapper[5070]: I1213 03:58:10.027036 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/516cf3ad-a97e-4a23-98f3-47db8f1501a7-utilities\") pod \"certified-operators-z55zd\" (UID: \"516cf3ad-a97e-4a23-98f3-47db8f1501a7\") " pod="openshift-marketplace/certified-operators-z55zd" Dec 13 03:58:10 crc kubenswrapper[5070]: I1213 03:58:10.027099 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kbb5z\" (UniqueName: \"kubernetes.io/projected/516cf3ad-a97e-4a23-98f3-47db8f1501a7-kube-api-access-kbb5z\") pod \"certified-operators-z55zd\" (UID: \"516cf3ad-a97e-4a23-98f3-47db8f1501a7\") " pod="openshift-marketplace/certified-operators-z55zd" Dec 13 03:58:10 crc kubenswrapper[5070]: I1213 03:58:10.027335 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/516cf3ad-a97e-4a23-98f3-47db8f1501a7-catalog-content\") pod \"certified-operators-z55zd\" (UID: \"516cf3ad-a97e-4a23-98f3-47db8f1501a7\") " pod="openshift-marketplace/certified-operators-z55zd" Dec 13 03:58:10 crc kubenswrapper[5070]: I1213 03:58:10.129464 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/516cf3ad-a97e-4a23-98f3-47db8f1501a7-utilities\") pod \"certified-operators-z55zd\" (UID: \"516cf3ad-a97e-4a23-98f3-47db8f1501a7\") " pod="openshift-marketplace/certified-operators-z55zd" Dec 13 03:58:10 crc kubenswrapper[5070]: I1213 03:58:10.129520 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kbb5z\" (UniqueName: \"kubernetes.io/projected/516cf3ad-a97e-4a23-98f3-47db8f1501a7-kube-api-access-kbb5z\") pod \"certified-operators-z55zd\" (UID: 
\"516cf3ad-a97e-4a23-98f3-47db8f1501a7\") " pod="openshift-marketplace/certified-operators-z55zd" Dec 13 03:58:10 crc kubenswrapper[5070]: I1213 03:58:10.129598 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/516cf3ad-a97e-4a23-98f3-47db8f1501a7-catalog-content\") pod \"certified-operators-z55zd\" (UID: \"516cf3ad-a97e-4a23-98f3-47db8f1501a7\") " pod="openshift-marketplace/certified-operators-z55zd" Dec 13 03:58:10 crc kubenswrapper[5070]: I1213 03:58:10.129979 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/516cf3ad-a97e-4a23-98f3-47db8f1501a7-utilities\") pod \"certified-operators-z55zd\" (UID: \"516cf3ad-a97e-4a23-98f3-47db8f1501a7\") " pod="openshift-marketplace/certified-operators-z55zd" Dec 13 03:58:10 crc kubenswrapper[5070]: I1213 03:58:10.130063 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/516cf3ad-a97e-4a23-98f3-47db8f1501a7-catalog-content\") pod \"certified-operators-z55zd\" (UID: \"516cf3ad-a97e-4a23-98f3-47db8f1501a7\") " pod="openshift-marketplace/certified-operators-z55zd" Dec 13 03:58:10 crc kubenswrapper[5070]: I1213 03:58:10.148992 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kbb5z\" (UniqueName: \"kubernetes.io/projected/516cf3ad-a97e-4a23-98f3-47db8f1501a7-kube-api-access-kbb5z\") pod \"certified-operators-z55zd\" (UID: \"516cf3ad-a97e-4a23-98f3-47db8f1501a7\") " pod="openshift-marketplace/certified-operators-z55zd" Dec 13 03:58:10 crc kubenswrapper[5070]: I1213 03:58:10.217818 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-z55zd" Dec 13 03:58:10 crc kubenswrapper[5070]: W1213 03:58:10.755555 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod516cf3ad_a97e_4a23_98f3_47db8f1501a7.slice/crio-23dfb73e0bffbb491be5bddca933a700de2b69b39c74d91a430b7ae42429e517 WatchSource:0}: Error finding container 23dfb73e0bffbb491be5bddca933a700de2b69b39c74d91a430b7ae42429e517: Status 404 returned error can't find the container with id 23dfb73e0bffbb491be5bddca933a700de2b69b39c74d91a430b7ae42429e517 Dec 13 03:58:10 crc kubenswrapper[5070]: I1213 03:58:10.756268 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-z55zd"] Dec 13 03:58:10 crc kubenswrapper[5070]: I1213 03:58:10.965038 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-z55zd" event={"ID":"516cf3ad-a97e-4a23-98f3-47db8f1501a7","Type":"ContainerStarted","Data":"8b536cf114a47eecbc23ce0aee3d72c6c61bccb3d4cb8c2fbc64c9170d49d5a1"} Dec 13 03:58:10 crc kubenswrapper[5070]: I1213 03:58:10.965114 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-z55zd" event={"ID":"516cf3ad-a97e-4a23-98f3-47db8f1501a7","Type":"ContainerStarted","Data":"23dfb73e0bffbb491be5bddca933a700de2b69b39c74d91a430b7ae42429e517"} Dec 13 03:58:11 crc kubenswrapper[5070]: I1213 03:58:11.977407 5070 generic.go:334] "Generic (PLEG): container finished" podID="516cf3ad-a97e-4a23-98f3-47db8f1501a7" containerID="8b536cf114a47eecbc23ce0aee3d72c6c61bccb3d4cb8c2fbc64c9170d49d5a1" exitCode=0 Dec 13 03:58:11 crc kubenswrapper[5070]: I1213 03:58:11.977497 5070 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-z55zd" event={"ID":"516cf3ad-a97e-4a23-98f3-47db8f1501a7","Type":"ContainerDied","Data":"8b536cf114a47eecbc23ce0aee3d72c6c61bccb3d4cb8c2fbc64c9170d49d5a1"} Dec 13 03:58:12 crc kubenswrapper[5070]: I1213 03:58:12.991329 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-z55zd" event={"ID":"516cf3ad-a97e-4a23-98f3-47db8f1501a7","Type":"ContainerStarted","Data":"1c79111a0d5e4b23a124df66c8c5f3e12094c65e7dcecf8bcde1bdec84ac5c85"} Dec 13 03:58:14 crc kubenswrapper[5070]: I1213 03:58:14.002821 5070 generic.go:334] "Generic (PLEG): container finished" podID="516cf3ad-a97e-4a23-98f3-47db8f1501a7" containerID="1c79111a0d5e4b23a124df66c8c5f3e12094c65e7dcecf8bcde1bdec84ac5c85" exitCode=0 Dec 13 03:58:14 crc kubenswrapper[5070]: I1213 03:58:14.002925 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-z55zd" event={"ID":"516cf3ad-a97e-4a23-98f3-47db8f1501a7","Type":"ContainerDied","Data":"1c79111a0d5e4b23a124df66c8c5f3e12094c65e7dcecf8bcde1bdec84ac5c85"} Dec 13 03:58:16 crc kubenswrapper[5070]: I1213 03:58:16.027118 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-z55zd" event={"ID":"516cf3ad-a97e-4a23-98f3-47db8f1501a7","Type":"ContainerStarted","Data":"c7d5f97622f796b1dde2de2e6e6945a727f3dcb887e52be009e23ed7be248a2a"} Dec 13 03:58:16 crc kubenswrapper[5070]: I1213 03:58:16.044812 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-z55zd" podStartSLOduration=4.063044987 podStartE2EDuration="7.044791037s" podCreationTimestamp="2025-12-13 03:58:09 +0000 UTC" firstStartedPulling="2025-12-13 03:58:11.979492153 +0000 UTC m=+2784.215335699" lastFinishedPulling="2025-12-13 03:58:14.961238213 +0000 UTC m=+2787.197081749" observedRunningTime="2025-12-13 03:58:16.042015581 +0000 UTC m=+2788.277859147" watchObservedRunningTime="2025-12-13 03:58:16.044791037 +0000 UTC m=+2788.280634583" Dec 13 03:58:16 crc kubenswrapper[5070]: I1213 03:58:16.685073 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-fw62b" Dec 13 03:58:16 crc kubenswrapper[5070]: I1213 03:58:16.733757 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-fw62b" Dec 13 03:58:17 crc kubenswrapper[5070]: I1213 03:58:17.274496 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-fw62b"] Dec 13 03:58:18 crc kubenswrapper[5070]: I1213 03:58:18.044861 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-fw62b" podUID="c526a897-3e53-4ed2-8973-69f46dcda027" containerName="registry-server" containerID="cri-o://f07bde96e98472b110f02341e21da914a7d8fd3d51503f3352a796dbb632799e" gracePeriod=2 Dec 13 03:58:18 crc kubenswrapper[5070]: I1213 03:58:18.507141 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-fw62b" Dec 13 03:58:18 crc kubenswrapper[5070]: I1213 03:58:18.614648 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c526a897-3e53-4ed2-8973-69f46dcda027-catalog-content\") pod \"c526a897-3e53-4ed2-8973-69f46dcda027\" (UID: \"c526a897-3e53-4ed2-8973-69f46dcda027\") " Dec 13 03:58:18 crc kubenswrapper[5070]: I1213 03:58:18.614793 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dpk55\" (UniqueName: \"kubernetes.io/projected/c526a897-3e53-4ed2-8973-69f46dcda027-kube-api-access-dpk55\") pod \"c526a897-3e53-4ed2-8973-69f46dcda027\" (UID: \"c526a897-3e53-4ed2-8973-69f46dcda027\") " Dec 13 03:58:18 crc kubenswrapper[5070]: I1213 03:58:18.614932 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c526a897-3e53-4ed2-8973-69f46dcda027-utilities\") pod \"c526a897-3e53-4ed2-8973-69f46dcda027\" (UID: \"c526a897-3e53-4ed2-8973-69f46dcda027\") " Dec 13 03:58:18 crc kubenswrapper[5070]: I1213 03:58:18.615743 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c526a897-3e53-4ed2-8973-69f46dcda027-utilities" (OuterVolumeSpecName: "utilities") pod "c526a897-3e53-4ed2-8973-69f46dcda027" (UID: "c526a897-3e53-4ed2-8973-69f46dcda027"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 03:58:18 crc kubenswrapper[5070]: I1213 03:58:18.624838 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c526a897-3e53-4ed2-8973-69f46dcda027-kube-api-access-dpk55" (OuterVolumeSpecName: "kube-api-access-dpk55") pod "c526a897-3e53-4ed2-8973-69f46dcda027" (UID: "c526a897-3e53-4ed2-8973-69f46dcda027"). InnerVolumeSpecName "kube-api-access-dpk55". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:58:18 crc kubenswrapper[5070]: I1213 03:58:18.714423 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c526a897-3e53-4ed2-8973-69f46dcda027-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c526a897-3e53-4ed2-8973-69f46dcda027" (UID: "c526a897-3e53-4ed2-8973-69f46dcda027"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 03:58:18 crc kubenswrapper[5070]: I1213 03:58:18.717379 5070 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c526a897-3e53-4ed2-8973-69f46dcda027-utilities\") on node \"crc\" DevicePath \"\"" Dec 13 03:58:18 crc kubenswrapper[5070]: I1213 03:58:18.717425 5070 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c526a897-3e53-4ed2-8973-69f46dcda027-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 13 03:58:18 crc kubenswrapper[5070]: I1213 03:58:18.717439 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dpk55\" (UniqueName: \"kubernetes.io/projected/c526a897-3e53-4ed2-8973-69f46dcda027-kube-api-access-dpk55\") on node \"crc\" DevicePath \"\"" Dec 13 03:58:19 crc kubenswrapper[5070]: I1213 03:58:19.064488 5070 generic.go:334] "Generic (PLEG): container finished" podID="c526a897-3e53-4ed2-8973-69f46dcda027" containerID="f07bde96e98472b110f02341e21da914a7d8fd3d51503f3352a796dbb632799e" exitCode=0 Dec 13 03:58:19 crc kubenswrapper[5070]: I1213 03:58:19.064545 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fw62b" event={"ID":"c526a897-3e53-4ed2-8973-69f46dcda027","Type":"ContainerDied","Data":"f07bde96e98472b110f02341e21da914a7d8fd3d51503f3352a796dbb632799e"} Dec 13 03:58:19 crc kubenswrapper[5070]: I1213 03:58:19.064581 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fw62b" event={"ID":"c526a897-3e53-4ed2-8973-69f46dcda027","Type":"ContainerDied","Data":"86ca8780f9e0f8360d1429ab06f270b00756da636f8e99ba72ce00696f1c6c50"} Dec 13 03:58:19 crc kubenswrapper[5070]: I1213 03:58:19.064632 5070 scope.go:117] "RemoveContainer" containerID="f07bde96e98472b110f02341e21da914a7d8fd3d51503f3352a796dbb632799e" Dec 13 03:58:19 crc kubenswrapper[5070]: I1213 03:58:19.064806 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-fw62b" Dec 13 03:58:19 crc kubenswrapper[5070]: I1213 03:58:19.085455 5070 scope.go:117] "RemoveContainer" containerID="75826a500eaf886c39f66a533544c705032ca01bb93c97abc7cbc35897589627" Dec 13 03:58:19 crc kubenswrapper[5070]: I1213 03:58:19.110358 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-fw62b"] Dec 13 03:58:19 crc kubenswrapper[5070]: I1213 03:58:19.122823 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-fw62b"] Dec 13 03:58:19 crc kubenswrapper[5070]: I1213 03:58:19.129503 5070 scope.go:117] "RemoveContainer" containerID="c5eb0eddb153d570e1c19d7ea2c0a0cb0f5967696dc6320d41b6dc07d9b373c1" Dec 13 03:58:19 crc kubenswrapper[5070]: I1213 03:58:19.154732 5070 scope.go:117] "RemoveContainer" containerID="f07bde96e98472b110f02341e21da914a7d8fd3d51503f3352a796dbb632799e" Dec 13 03:58:19 crc kubenswrapper[5070]: E1213 03:58:19.155218 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f07bde96e98472b110f02341e21da914a7d8fd3d51503f3352a796dbb632799e\": container with ID starting with f07bde96e98472b110f02341e21da914a7d8fd3d51503f3352a796dbb632799e not found: ID does not exist" containerID="f07bde96e98472b110f02341e21da914a7d8fd3d51503f3352a796dbb632799e" Dec 13 03:58:19 crc kubenswrapper[5070]: I1213 03:58:19.155257 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f07bde96e98472b110f02341e21da914a7d8fd3d51503f3352a796dbb632799e"} err="failed to get container status \"f07bde96e98472b110f02341e21da914a7d8fd3d51503f3352a796dbb632799e\": rpc error: code = NotFound desc = could not find container \"f07bde96e98472b110f02341e21da914a7d8fd3d51503f3352a796dbb632799e\": container with ID starting with f07bde96e98472b110f02341e21da914a7d8fd3d51503f3352a796dbb632799e not found: ID does not exist" Dec 13 03:58:19 crc kubenswrapper[5070]: I1213 03:58:19.155299 5070 scope.go:117] "RemoveContainer" containerID="75826a500eaf886c39f66a533544c705032ca01bb93c97abc7cbc35897589627" Dec 13 03:58:19 crc kubenswrapper[5070]: E1213 03:58:19.155785 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"75826a500eaf886c39f66a533544c705032ca01bb93c97abc7cbc35897589627\": container with ID starting with 75826a500eaf886c39f66a533544c705032ca01bb93c97abc7cbc35897589627 not found: ID does not exist" containerID="75826a500eaf886c39f66a533544c705032ca01bb93c97abc7cbc35897589627" Dec 13 03:58:19 crc kubenswrapper[5070]: I1213 03:58:19.155825 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"75826a500eaf886c39f66a533544c705032ca01bb93c97abc7cbc35897589627"} err="failed to get container status \"75826a500eaf886c39f66a533544c705032ca01bb93c97abc7cbc35897589627\": rpc error: code = NotFound desc = could not find container \"75826a500eaf886c39f66a533544c705032ca01bb93c97abc7cbc35897589627\": container with ID starting with 75826a500eaf886c39f66a533544c705032ca01bb93c97abc7cbc35897589627 not found: ID does not exist" Dec 13 03:58:19 crc kubenswrapper[5070]: I1213 03:58:19.155849 5070 scope.go:117] "RemoveContainer" containerID="c5eb0eddb153d570e1c19d7ea2c0a0cb0f5967696dc6320d41b6dc07d9b373c1" Dec 13 03:58:19 crc kubenswrapper[5070]: E1213 03:58:19.156335 5070 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"c5eb0eddb153d570e1c19d7ea2c0a0cb0f5967696dc6320d41b6dc07d9b373c1\": container with ID starting with c5eb0eddb153d570e1c19d7ea2c0a0cb0f5967696dc6320d41b6dc07d9b373c1 not found: ID does not exist" containerID="c5eb0eddb153d570e1c19d7ea2c0a0cb0f5967696dc6320d41b6dc07d9b373c1" Dec 13 03:58:19 crc kubenswrapper[5070]: I1213 03:58:19.156377 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c5eb0eddb153d570e1c19d7ea2c0a0cb0f5967696dc6320d41b6dc07d9b373c1"} err="failed to get container status \"c5eb0eddb153d570e1c19d7ea2c0a0cb0f5967696dc6320d41b6dc07d9b373c1\": rpc error: code = NotFound desc = could not find container \"c5eb0eddb153d570e1c19d7ea2c0a0cb0f5967696dc6320d41b6dc07d9b373c1\": container with ID starting with c5eb0eddb153d570e1c19d7ea2c0a0cb0f5967696dc6320d41b6dc07d9b373c1 not found: ID does not exist" Dec 13 03:58:20 crc kubenswrapper[5070]: I1213 03:58:20.177338 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c526a897-3e53-4ed2-8973-69f46dcda027" path="/var/lib/kubelet/pods/c526a897-3e53-4ed2-8973-69f46dcda027/volumes" Dec 13 03:58:20 crc kubenswrapper[5070]: I1213 03:58:20.217999 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-z55zd" Dec 13 03:58:20 crc kubenswrapper[5070]: I1213 03:58:20.218057 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-z55zd" Dec 13 03:58:20 crc kubenswrapper[5070]: I1213 03:58:20.277494 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-z55zd" Dec 13 03:58:21 crc kubenswrapper[5070]: I1213 03:58:21.131459 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-z55zd" Dec 13 03:58:21 crc kubenswrapper[5070]: I1213 03:58:21.676548 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-z55zd"] Dec 13 03:58:23 crc kubenswrapper[5070]: I1213 03:58:23.099363 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-z55zd" podUID="516cf3ad-a97e-4a23-98f3-47db8f1501a7" containerName="registry-server" containerID="cri-o://c7d5f97622f796b1dde2de2e6e6945a727f3dcb887e52be009e23ed7be248a2a" gracePeriod=2 Dec 13 03:58:24 crc kubenswrapper[5070]: I1213 03:58:24.072426 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-z55zd" Dec 13 03:58:24 crc kubenswrapper[5070]: I1213 03:58:24.109822 5070 generic.go:334] "Generic (PLEG): container finished" podID="516cf3ad-a97e-4a23-98f3-47db8f1501a7" containerID="c7d5f97622f796b1dde2de2e6e6945a727f3dcb887e52be009e23ed7be248a2a" exitCode=0 Dec 13 03:58:24 crc kubenswrapper[5070]: I1213 03:58:24.109866 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-z55zd" event={"ID":"516cf3ad-a97e-4a23-98f3-47db8f1501a7","Type":"ContainerDied","Data":"c7d5f97622f796b1dde2de2e6e6945a727f3dcb887e52be009e23ed7be248a2a"} Dec 13 03:58:24 crc kubenswrapper[5070]: I1213 03:58:24.109922 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-z55zd" Dec 13 03:58:24 crc kubenswrapper[5070]: I1213 03:58:24.109952 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-z55zd" event={"ID":"516cf3ad-a97e-4a23-98f3-47db8f1501a7","Type":"ContainerDied","Data":"23dfb73e0bffbb491be5bddca933a700de2b69b39c74d91a430b7ae42429e517"} Dec 13 03:58:24 crc kubenswrapper[5070]: I1213 03:58:24.110005 5070 scope.go:117] "RemoveContainer" containerID="c7d5f97622f796b1dde2de2e6e6945a727f3dcb887e52be009e23ed7be248a2a" Dec 13 03:58:24 crc kubenswrapper[5070]: I1213 03:58:24.130904 5070 scope.go:117] "RemoveContainer" containerID="1c79111a0d5e4b23a124df66c8c5f3e12094c65e7dcecf8bcde1bdec84ac5c85" Dec 13 03:58:24 crc kubenswrapper[5070]: I1213 03:58:24.154641 5070 scope.go:117] "RemoveContainer" containerID="8b536cf114a47eecbc23ce0aee3d72c6c61bccb3d4cb8c2fbc64c9170d49d5a1" Dec 13 03:58:24 crc kubenswrapper[5070]: I1213 03:58:24.192891 5070 scope.go:117] "RemoveContainer" containerID="c7d5f97622f796b1dde2de2e6e6945a727f3dcb887e52be009e23ed7be248a2a" Dec 13 03:58:24 crc kubenswrapper[5070]: E1213 03:58:24.193348 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c7d5f97622f796b1dde2de2e6e6945a727f3dcb887e52be009e23ed7be248a2a\": container with ID starting with c7d5f97622f796b1dde2de2e6e6945a727f3dcb887e52be009e23ed7be248a2a not found: ID does not exist" containerID="c7d5f97622f796b1dde2de2e6e6945a727f3dcb887e52be009e23ed7be248a2a" Dec 13 03:58:24 crc kubenswrapper[5070]: I1213 03:58:24.193380 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c7d5f97622f796b1dde2de2e6e6945a727f3dcb887e52be009e23ed7be248a2a"} err="failed to get container status \"c7d5f97622f796b1dde2de2e6e6945a727f3dcb887e52be009e23ed7be248a2a\": rpc error: code = NotFound desc = could not find container \"c7d5f97622f796b1dde2de2e6e6945a727f3dcb887e52be009e23ed7be248a2a\": container with ID starting with c7d5f97622f796b1dde2de2e6e6945a727f3dcb887e52be009e23ed7be248a2a not found: ID does not exist" Dec 13 03:58:24 crc kubenswrapper[5070]: I1213 03:58:24.193402 5070 scope.go:117] "RemoveContainer" containerID="1c79111a0d5e4b23a124df66c8c5f3e12094c65e7dcecf8bcde1bdec84ac5c85" Dec 13 03:58:24 crc kubenswrapper[5070]: E1213 03:58:24.194393 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1c79111a0d5e4b23a124df66c8c5f3e12094c65e7dcecf8bcde1bdec84ac5c85\": container with ID starting with 1c79111a0d5e4b23a124df66c8c5f3e12094c65e7dcecf8bcde1bdec84ac5c85 not found: ID does not exist" containerID="1c79111a0d5e4b23a124df66c8c5f3e12094c65e7dcecf8bcde1bdec84ac5c85" Dec 13 03:58:24 crc kubenswrapper[5070]: I1213 03:58:24.194473 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1c79111a0d5e4b23a124df66c8c5f3e12094c65e7dcecf8bcde1bdec84ac5c85"} err="failed to get container status \"1c79111a0d5e4b23a124df66c8c5f3e12094c65e7dcecf8bcde1bdec84ac5c85\": rpc error: code = NotFound desc = could not find container \"1c79111a0d5e4b23a124df66c8c5f3e12094c65e7dcecf8bcde1bdec84ac5c85\": container with ID starting with 1c79111a0d5e4b23a124df66c8c5f3e12094c65e7dcecf8bcde1bdec84ac5c85 not found: ID does not exist" Dec 13 03:58:24 crc kubenswrapper[5070]: I1213 03:58:24.194516 5070 scope.go:117] "RemoveContainer" 
containerID="8b536cf114a47eecbc23ce0aee3d72c6c61bccb3d4cb8c2fbc64c9170d49d5a1" Dec 13 03:58:24 crc kubenswrapper[5070]: E1213 03:58:24.200742 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8b536cf114a47eecbc23ce0aee3d72c6c61bccb3d4cb8c2fbc64c9170d49d5a1\": container with ID starting with 8b536cf114a47eecbc23ce0aee3d72c6c61bccb3d4cb8c2fbc64c9170d49d5a1 not found: ID does not exist" containerID="8b536cf114a47eecbc23ce0aee3d72c6c61bccb3d4cb8c2fbc64c9170d49d5a1" Dec 13 03:58:24 crc kubenswrapper[5070]: I1213 03:58:24.200800 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8b536cf114a47eecbc23ce0aee3d72c6c61bccb3d4cb8c2fbc64c9170d49d5a1"} err="failed to get container status \"8b536cf114a47eecbc23ce0aee3d72c6c61bccb3d4cb8c2fbc64c9170d49d5a1\": rpc error: code = NotFound desc = could not find container \"8b536cf114a47eecbc23ce0aee3d72c6c61bccb3d4cb8c2fbc64c9170d49d5a1\": container with ID starting with 8b536cf114a47eecbc23ce0aee3d72c6c61bccb3d4cb8c2fbc64c9170d49d5a1 not found: ID does not exist" Dec 13 03:58:24 crc kubenswrapper[5070]: I1213 03:58:24.218770 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/516cf3ad-a97e-4a23-98f3-47db8f1501a7-catalog-content\") pod \"516cf3ad-a97e-4a23-98f3-47db8f1501a7\" (UID: \"516cf3ad-a97e-4a23-98f3-47db8f1501a7\") " Dec 13 03:58:24 crc kubenswrapper[5070]: I1213 03:58:24.218925 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/516cf3ad-a97e-4a23-98f3-47db8f1501a7-utilities\") pod \"516cf3ad-a97e-4a23-98f3-47db8f1501a7\" (UID: \"516cf3ad-a97e-4a23-98f3-47db8f1501a7\") " Dec 13 03:58:24 crc kubenswrapper[5070]: I1213 03:58:24.218974 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kbb5z\" (UniqueName: \"kubernetes.io/projected/516cf3ad-a97e-4a23-98f3-47db8f1501a7-kube-api-access-kbb5z\") pod \"516cf3ad-a97e-4a23-98f3-47db8f1501a7\" (UID: \"516cf3ad-a97e-4a23-98f3-47db8f1501a7\") " Dec 13 03:58:24 crc kubenswrapper[5070]: I1213 03:58:24.219993 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/516cf3ad-a97e-4a23-98f3-47db8f1501a7-utilities" (OuterVolumeSpecName: "utilities") pod "516cf3ad-a97e-4a23-98f3-47db8f1501a7" (UID: "516cf3ad-a97e-4a23-98f3-47db8f1501a7"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 03:58:24 crc kubenswrapper[5070]: I1213 03:58:24.224431 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/516cf3ad-a97e-4a23-98f3-47db8f1501a7-kube-api-access-kbb5z" (OuterVolumeSpecName: "kube-api-access-kbb5z") pod "516cf3ad-a97e-4a23-98f3-47db8f1501a7" (UID: "516cf3ad-a97e-4a23-98f3-47db8f1501a7"). InnerVolumeSpecName "kube-api-access-kbb5z". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:58:24 crc kubenswrapper[5070]: I1213 03:58:24.292104 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/516cf3ad-a97e-4a23-98f3-47db8f1501a7-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "516cf3ad-a97e-4a23-98f3-47db8f1501a7" (UID: "516cf3ad-a97e-4a23-98f3-47db8f1501a7"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 03:58:24 crc kubenswrapper[5070]: I1213 03:58:24.354401 5070 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/516cf3ad-a97e-4a23-98f3-47db8f1501a7-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 13 03:58:24 crc kubenswrapper[5070]: I1213 03:58:24.355145 5070 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/516cf3ad-a97e-4a23-98f3-47db8f1501a7-utilities\") on node \"crc\" DevicePath \"\"" Dec 13 03:58:24 crc kubenswrapper[5070]: I1213 03:58:24.355172 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kbb5z\" (UniqueName: \"kubernetes.io/projected/516cf3ad-a97e-4a23-98f3-47db8f1501a7-kube-api-access-kbb5z\") on node \"crc\" DevicePath \"\"" Dec 13 03:58:24 crc kubenswrapper[5070]: I1213 03:58:24.448244 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-z55zd"] Dec 13 03:58:24 crc kubenswrapper[5070]: I1213 03:58:24.455848 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-z55zd"] Dec 13 03:58:26 crc kubenswrapper[5070]: I1213 03:58:26.181024 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="516cf3ad-a97e-4a23-98f3-47db8f1501a7" path="/var/lib/kubelet/pods/516cf3ad-a97e-4a23-98f3-47db8f1501a7/volumes" Dec 13 03:59:07 crc kubenswrapper[5070]: I1213 03:59:07.499556 5070 generic.go:334] "Generic (PLEG): container finished" podID="244cbfa6-dea4-4fae-b3f5-582f53c21551" containerID="020d17efa80118ee49f5395b448aaf01789598f7f8117c02c2f4a274cbc8c2a5" exitCode=0 Dec 13 03:59:07 crc kubenswrapper[5070]: I1213 03:59:07.499945 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-ls75m" event={"ID":"244cbfa6-dea4-4fae-b3f5-582f53c21551","Type":"ContainerDied","Data":"020d17efa80118ee49f5395b448aaf01789598f7f8117c02c2f4a274cbc8c2a5"} Dec 13 03:59:08 crc kubenswrapper[5070]: I1213 03:59:08.968239 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-ls75m" Dec 13 03:59:09 crc kubenswrapper[5070]: I1213 03:59:09.032518 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/244cbfa6-dea4-4fae-b3f5-582f53c21551-inventory\") pod \"244cbfa6-dea4-4fae-b3f5-582f53c21551\" (UID: \"244cbfa6-dea4-4fae-b3f5-582f53c21551\") " Dec 13 03:59:09 crc kubenswrapper[5070]: I1213 03:59:09.032576 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c2n6n\" (UniqueName: \"kubernetes.io/projected/244cbfa6-dea4-4fae-b3f5-582f53c21551-kube-api-access-c2n6n\") pod \"244cbfa6-dea4-4fae-b3f5-582f53c21551\" (UID: \"244cbfa6-dea4-4fae-b3f5-582f53c21551\") " Dec 13 03:59:09 crc kubenswrapper[5070]: I1213 03:59:09.032629 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/244cbfa6-dea4-4fae-b3f5-582f53c21551-ssh-key\") pod \"244cbfa6-dea4-4fae-b3f5-582f53c21551\" (UID: \"244cbfa6-dea4-4fae-b3f5-582f53c21551\") " Dec 13 03:59:09 crc kubenswrapper[5070]: I1213 03:59:09.032683 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/244cbfa6-dea4-4fae-b3f5-582f53c21551-libvirt-secret-0\") pod \"244cbfa6-dea4-4fae-b3f5-582f53c21551\" (UID: \"244cbfa6-dea4-4fae-b3f5-582f53c21551\") " Dec 13 03:59:09 crc kubenswrapper[5070]: I1213 03:59:09.032701 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/244cbfa6-dea4-4fae-b3f5-582f53c21551-libvirt-combined-ca-bundle\") pod \"244cbfa6-dea4-4fae-b3f5-582f53c21551\" (UID: \"244cbfa6-dea4-4fae-b3f5-582f53c21551\") " Dec 13 03:59:09 crc kubenswrapper[5070]: I1213 03:59:09.032757 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/244cbfa6-dea4-4fae-b3f5-582f53c21551-ceph\") pod \"244cbfa6-dea4-4fae-b3f5-582f53c21551\" (UID: \"244cbfa6-dea4-4fae-b3f5-582f53c21551\") " Dec 13 03:59:09 crc kubenswrapper[5070]: I1213 03:59:09.040143 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/244cbfa6-dea4-4fae-b3f5-582f53c21551-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "244cbfa6-dea4-4fae-b3f5-582f53c21551" (UID: "244cbfa6-dea4-4fae-b3f5-582f53c21551"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:59:09 crc kubenswrapper[5070]: I1213 03:59:09.041226 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/244cbfa6-dea4-4fae-b3f5-582f53c21551-kube-api-access-c2n6n" (OuterVolumeSpecName: "kube-api-access-c2n6n") pod "244cbfa6-dea4-4fae-b3f5-582f53c21551" (UID: "244cbfa6-dea4-4fae-b3f5-582f53c21551"). InnerVolumeSpecName "kube-api-access-c2n6n". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 03:59:09 crc kubenswrapper[5070]: I1213 03:59:09.048568 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/244cbfa6-dea4-4fae-b3f5-582f53c21551-ceph" (OuterVolumeSpecName: "ceph") pod "244cbfa6-dea4-4fae-b3f5-582f53c21551" (UID: "244cbfa6-dea4-4fae-b3f5-582f53c21551"). InnerVolumeSpecName "ceph". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:59:09 crc kubenswrapper[5070]: I1213 03:59:09.062631 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/244cbfa6-dea4-4fae-b3f5-582f53c21551-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "244cbfa6-dea4-4fae-b3f5-582f53c21551" (UID: "244cbfa6-dea4-4fae-b3f5-582f53c21551"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:59:09 crc kubenswrapper[5070]: I1213 03:59:09.070250 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/244cbfa6-dea4-4fae-b3f5-582f53c21551-libvirt-secret-0" (OuterVolumeSpecName: "libvirt-secret-0") pod "244cbfa6-dea4-4fae-b3f5-582f53c21551" (UID: "244cbfa6-dea4-4fae-b3f5-582f53c21551"). InnerVolumeSpecName "libvirt-secret-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:59:09 crc kubenswrapper[5070]: I1213 03:59:09.072224 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/244cbfa6-dea4-4fae-b3f5-582f53c21551-inventory" (OuterVolumeSpecName: "inventory") pod "244cbfa6-dea4-4fae-b3f5-582f53c21551" (UID: "244cbfa6-dea4-4fae-b3f5-582f53c21551"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 03:59:09 crc kubenswrapper[5070]: I1213 03:59:09.137842 5070 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/244cbfa6-dea4-4fae-b3f5-582f53c21551-ceph\") on node \"crc\" DevicePath \"\"" Dec 13 03:59:09 crc kubenswrapper[5070]: I1213 03:59:09.137903 5070 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/244cbfa6-dea4-4fae-b3f5-582f53c21551-inventory\") on node \"crc\" DevicePath \"\"" Dec 13 03:59:09 crc kubenswrapper[5070]: I1213 03:59:09.137929 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c2n6n\" (UniqueName: \"kubernetes.io/projected/244cbfa6-dea4-4fae-b3f5-582f53c21551-kube-api-access-c2n6n\") on node \"crc\" DevicePath \"\"" Dec 13 03:59:09 crc kubenswrapper[5070]: I1213 03:59:09.137949 5070 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/244cbfa6-dea4-4fae-b3f5-582f53c21551-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 13 03:59:09 crc kubenswrapper[5070]: I1213 03:59:09.137968 5070 reconciler_common.go:293] "Volume detached for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/244cbfa6-dea4-4fae-b3f5-582f53c21551-libvirt-secret-0\") on node \"crc\" DevicePath \"\"" Dec 13 03:59:09 crc kubenswrapper[5070]: I1213 03:59:09.137987 5070 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/244cbfa6-dea4-4fae-b3f5-582f53c21551-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 03:59:09 crc kubenswrapper[5070]: I1213 03:59:09.521258 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-ls75m" event={"ID":"244cbfa6-dea4-4fae-b3f5-582f53c21551","Type":"ContainerDied","Data":"eae61252474c8556e587d8dfcf849b1aa291b684d042deb11e91d7635a7990ce"} Dec 13 03:59:09 crc kubenswrapper[5070]: I1213 03:59:09.521690 5070 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="eae61252474c8556e587d8dfcf849b1aa291b684d042deb11e91d7635a7990ce" Dec 13 03:59:09 crc kubenswrapper[5070]: I1213 03:59:09.521309 5070 util.go:48] 
"No ready sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-ls75m" Dec 13 03:59:09 crc kubenswrapper[5070]: I1213 03:59:09.620236 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tn2zd"] Dec 13 03:59:09 crc kubenswrapper[5070]: E1213 03:59:09.624096 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="516cf3ad-a97e-4a23-98f3-47db8f1501a7" containerName="extract-utilities" Dec 13 03:59:09 crc kubenswrapper[5070]: I1213 03:59:09.624235 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="516cf3ad-a97e-4a23-98f3-47db8f1501a7" containerName="extract-utilities" Dec 13 03:59:09 crc kubenswrapper[5070]: E1213 03:59:09.624380 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="244cbfa6-dea4-4fae-b3f5-582f53c21551" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Dec 13 03:59:09 crc kubenswrapper[5070]: I1213 03:59:09.624519 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="244cbfa6-dea4-4fae-b3f5-582f53c21551" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Dec 13 03:59:09 crc kubenswrapper[5070]: E1213 03:59:09.624643 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="516cf3ad-a97e-4a23-98f3-47db8f1501a7" containerName="extract-content" Dec 13 03:59:09 crc kubenswrapper[5070]: I1213 03:59:09.624809 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="516cf3ad-a97e-4a23-98f3-47db8f1501a7" containerName="extract-content" Dec 13 03:59:09 crc kubenswrapper[5070]: E1213 03:59:09.625408 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c526a897-3e53-4ed2-8973-69f46dcda027" containerName="registry-server" Dec 13 03:59:09 crc kubenswrapper[5070]: I1213 03:59:09.625530 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="c526a897-3e53-4ed2-8973-69f46dcda027" containerName="registry-server" Dec 13 03:59:09 crc kubenswrapper[5070]: E1213 03:59:09.625633 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="516cf3ad-a97e-4a23-98f3-47db8f1501a7" containerName="registry-server" Dec 13 03:59:09 crc kubenswrapper[5070]: I1213 03:59:09.625713 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="516cf3ad-a97e-4a23-98f3-47db8f1501a7" containerName="registry-server" Dec 13 03:59:09 crc kubenswrapper[5070]: E1213 03:59:09.625794 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c526a897-3e53-4ed2-8973-69f46dcda027" containerName="extract-utilities" Dec 13 03:59:09 crc kubenswrapper[5070]: I1213 03:59:09.625861 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="c526a897-3e53-4ed2-8973-69f46dcda027" containerName="extract-utilities" Dec 13 03:59:09 crc kubenswrapper[5070]: E1213 03:59:09.625944 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c526a897-3e53-4ed2-8973-69f46dcda027" containerName="extract-content" Dec 13 03:59:09 crc kubenswrapper[5070]: I1213 03:59:09.626012 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="c526a897-3e53-4ed2-8973-69f46dcda027" containerName="extract-content" Dec 13 03:59:09 crc kubenswrapper[5070]: I1213 03:59:09.626481 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="516cf3ad-a97e-4a23-98f3-47db8f1501a7" containerName="registry-server" Dec 13 03:59:09 crc kubenswrapper[5070]: I1213 03:59:09.626593 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="c526a897-3e53-4ed2-8973-69f46dcda027" 
containerName="registry-server" Dec 13 03:59:09 crc kubenswrapper[5070]: I1213 03:59:09.626679 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="244cbfa6-dea4-4fae-b3f5-582f53c21551" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Dec 13 03:59:09 crc kubenswrapper[5070]: I1213 03:59:09.627799 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tn2zd" Dec 13 03:59:09 crc kubenswrapper[5070]: I1213 03:59:09.630050 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ceph-nova" Dec 13 03:59:09 crc kubenswrapper[5070]: I1213 03:59:09.630098 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-gdkcc" Dec 13 03:59:09 crc kubenswrapper[5070]: I1213 03:59:09.630111 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 13 03:59:09 crc kubenswrapper[5070]: I1213 03:59:09.630203 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"nova-extra-config" Dec 13 03:59:09 crc kubenswrapper[5070]: I1213 03:59:09.630729 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 13 03:59:09 crc kubenswrapper[5070]: I1213 03:59:09.630791 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-compute-config" Dec 13 03:59:09 crc kubenswrapper[5070]: I1213 03:59:09.630885 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Dec 13 03:59:09 crc kubenswrapper[5070]: I1213 03:59:09.630966 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 13 03:59:09 crc kubenswrapper[5070]: I1213 03:59:09.631323 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-migration-ssh-key" Dec 13 03:59:09 crc kubenswrapper[5070]: I1213 03:59:09.633582 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tn2zd"] Dec 13 03:59:09 crc kubenswrapper[5070]: I1213 03:59:09.645316 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph-nova-0\" (UniqueName: \"kubernetes.io/configmap/62c6b21d-3500-4f16-b958-7a59dd7a7fda-ceph-nova-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tn2zd\" (UID: \"62c6b21d-3500-4f16-b958-7a59dd7a7fda\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tn2zd" Dec 13 03:59:09 crc kubenswrapper[5070]: I1213 03:59:09.645361 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/62c6b21d-3500-4f16-b958-7a59dd7a7fda-nova-migration-ssh-key-1\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tn2zd\" (UID: \"62c6b21d-3500-4f16-b958-7a59dd7a7fda\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tn2zd" Dec 13 03:59:09 crc kubenswrapper[5070]: I1213 03:59:09.645393 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/62c6b21d-3500-4f16-b958-7a59dd7a7fda-inventory\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tn2zd\" (UID: \"62c6b21d-3500-4f16-b958-7a59dd7a7fda\") " 
pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tn2zd" Dec 13 03:59:09 crc kubenswrapper[5070]: I1213 03:59:09.645418 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/62c6b21d-3500-4f16-b958-7a59dd7a7fda-nova-migration-ssh-key-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tn2zd\" (UID: \"62c6b21d-3500-4f16-b958-7a59dd7a7fda\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tn2zd" Dec 13 03:59:09 crc kubenswrapper[5070]: I1213 03:59:09.645865 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/62c6b21d-3500-4f16-b958-7a59dd7a7fda-nova-cell1-compute-config-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tn2zd\" (UID: \"62c6b21d-3500-4f16-b958-7a59dd7a7fda\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tn2zd" Dec 13 03:59:09 crc kubenswrapper[5070]: I1213 03:59:09.645921 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/62c6b21d-3500-4f16-b958-7a59dd7a7fda-ssh-key\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tn2zd\" (UID: \"62c6b21d-3500-4f16-b958-7a59dd7a7fda\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tn2zd" Dec 13 03:59:09 crc kubenswrapper[5070]: I1213 03:59:09.645967 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/62c6b21d-3500-4f16-b958-7a59dd7a7fda-nova-cell1-compute-config-1\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tn2zd\" (UID: \"62c6b21d-3500-4f16-b958-7a59dd7a7fda\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tn2zd" Dec 13 03:59:09 crc kubenswrapper[5070]: I1213 03:59:09.646049 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-custom-ceph-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/62c6b21d-3500-4f16-b958-7a59dd7a7fda-nova-custom-ceph-combined-ca-bundle\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tn2zd\" (UID: \"62c6b21d-3500-4f16-b958-7a59dd7a7fda\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tn2zd" Dec 13 03:59:09 crc kubenswrapper[5070]: I1213 03:59:09.646152 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m5nlb\" (UniqueName: \"kubernetes.io/projected/62c6b21d-3500-4f16-b958-7a59dd7a7fda-kube-api-access-m5nlb\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tn2zd\" (UID: \"62c6b21d-3500-4f16-b958-7a59dd7a7fda\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tn2zd" Dec 13 03:59:09 crc kubenswrapper[5070]: I1213 03:59:09.646195 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/62c6b21d-3500-4f16-b958-7a59dd7a7fda-ceph\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tn2zd\" (UID: \"62c6b21d-3500-4f16-b958-7a59dd7a7fda\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tn2zd" Dec 13 03:59:09 crc kubenswrapper[5070]: I1213 03:59:09.646241 5070 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/62c6b21d-3500-4f16-b958-7a59dd7a7fda-nova-extra-config-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tn2zd\" (UID: \"62c6b21d-3500-4f16-b958-7a59dd7a7fda\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tn2zd" Dec 13 03:59:09 crc kubenswrapper[5070]: I1213 03:59:09.748422 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/62c6b21d-3500-4f16-b958-7a59dd7a7fda-nova-cell1-compute-config-1\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tn2zd\" (UID: \"62c6b21d-3500-4f16-b958-7a59dd7a7fda\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tn2zd" Dec 13 03:59:09 crc kubenswrapper[5070]: I1213 03:59:09.748539 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-custom-ceph-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/62c6b21d-3500-4f16-b958-7a59dd7a7fda-nova-custom-ceph-combined-ca-bundle\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tn2zd\" (UID: \"62c6b21d-3500-4f16-b958-7a59dd7a7fda\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tn2zd" Dec 13 03:59:09 crc kubenswrapper[5070]: I1213 03:59:09.748597 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m5nlb\" (UniqueName: \"kubernetes.io/projected/62c6b21d-3500-4f16-b958-7a59dd7a7fda-kube-api-access-m5nlb\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tn2zd\" (UID: \"62c6b21d-3500-4f16-b958-7a59dd7a7fda\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tn2zd" Dec 13 03:59:09 crc kubenswrapper[5070]: I1213 03:59:09.748626 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/62c6b21d-3500-4f16-b958-7a59dd7a7fda-ceph\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tn2zd\" (UID: \"62c6b21d-3500-4f16-b958-7a59dd7a7fda\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tn2zd" Dec 13 03:59:09 crc kubenswrapper[5070]: I1213 03:59:09.748651 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/62c6b21d-3500-4f16-b958-7a59dd7a7fda-nova-extra-config-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tn2zd\" (UID: \"62c6b21d-3500-4f16-b958-7a59dd7a7fda\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tn2zd" Dec 13 03:59:09 crc kubenswrapper[5070]: I1213 03:59:09.748697 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph-nova-0\" (UniqueName: \"kubernetes.io/configmap/62c6b21d-3500-4f16-b958-7a59dd7a7fda-ceph-nova-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tn2zd\" (UID: \"62c6b21d-3500-4f16-b958-7a59dd7a7fda\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tn2zd" Dec 13 03:59:09 crc kubenswrapper[5070]: I1213 03:59:09.748720 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/62c6b21d-3500-4f16-b958-7a59dd7a7fda-nova-migration-ssh-key-1\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tn2zd\" (UID: \"62c6b21d-3500-4f16-b958-7a59dd7a7fda\") " 
pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tn2zd" Dec 13 03:59:09 crc kubenswrapper[5070]: I1213 03:59:09.748750 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/62c6b21d-3500-4f16-b958-7a59dd7a7fda-inventory\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tn2zd\" (UID: \"62c6b21d-3500-4f16-b958-7a59dd7a7fda\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tn2zd" Dec 13 03:59:09 crc kubenswrapper[5070]: I1213 03:59:09.748773 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/62c6b21d-3500-4f16-b958-7a59dd7a7fda-nova-migration-ssh-key-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tn2zd\" (UID: \"62c6b21d-3500-4f16-b958-7a59dd7a7fda\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tn2zd" Dec 13 03:59:09 crc kubenswrapper[5070]: I1213 03:59:09.748792 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/62c6b21d-3500-4f16-b958-7a59dd7a7fda-nova-cell1-compute-config-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tn2zd\" (UID: \"62c6b21d-3500-4f16-b958-7a59dd7a7fda\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tn2zd" Dec 13 03:59:09 crc kubenswrapper[5070]: I1213 03:59:09.748811 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/62c6b21d-3500-4f16-b958-7a59dd7a7fda-ssh-key\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tn2zd\" (UID: \"62c6b21d-3500-4f16-b958-7a59dd7a7fda\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tn2zd" Dec 13 03:59:09 crc kubenswrapper[5070]: I1213 03:59:09.749874 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph-nova-0\" (UniqueName: \"kubernetes.io/configmap/62c6b21d-3500-4f16-b958-7a59dd7a7fda-ceph-nova-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tn2zd\" (UID: \"62c6b21d-3500-4f16-b958-7a59dd7a7fda\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tn2zd" Dec 13 03:59:09 crc kubenswrapper[5070]: I1213 03:59:09.750565 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/62c6b21d-3500-4f16-b958-7a59dd7a7fda-nova-extra-config-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tn2zd\" (UID: \"62c6b21d-3500-4f16-b958-7a59dd7a7fda\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tn2zd" Dec 13 03:59:09 crc kubenswrapper[5070]: I1213 03:59:09.754198 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/62c6b21d-3500-4f16-b958-7a59dd7a7fda-ssh-key\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tn2zd\" (UID: \"62c6b21d-3500-4f16-b958-7a59dd7a7fda\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tn2zd" Dec 13 03:59:09 crc kubenswrapper[5070]: I1213 03:59:09.754650 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/62c6b21d-3500-4f16-b958-7a59dd7a7fda-nova-migration-ssh-key-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tn2zd\" (UID: 
\"62c6b21d-3500-4f16-b958-7a59dd7a7fda\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tn2zd" Dec 13 03:59:09 crc kubenswrapper[5070]: I1213 03:59:09.755331 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/62c6b21d-3500-4f16-b958-7a59dd7a7fda-nova-migration-ssh-key-1\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tn2zd\" (UID: \"62c6b21d-3500-4f16-b958-7a59dd7a7fda\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tn2zd" Dec 13 03:59:09 crc kubenswrapper[5070]: I1213 03:59:09.755650 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-custom-ceph-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/62c6b21d-3500-4f16-b958-7a59dd7a7fda-nova-custom-ceph-combined-ca-bundle\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tn2zd\" (UID: \"62c6b21d-3500-4f16-b958-7a59dd7a7fda\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tn2zd" Dec 13 03:59:09 crc kubenswrapper[5070]: I1213 03:59:09.756416 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/62c6b21d-3500-4f16-b958-7a59dd7a7fda-nova-cell1-compute-config-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tn2zd\" (UID: \"62c6b21d-3500-4f16-b958-7a59dd7a7fda\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tn2zd" Dec 13 03:59:09 crc kubenswrapper[5070]: I1213 03:59:09.756858 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/62c6b21d-3500-4f16-b958-7a59dd7a7fda-ceph\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tn2zd\" (UID: \"62c6b21d-3500-4f16-b958-7a59dd7a7fda\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tn2zd" Dec 13 03:59:09 crc kubenswrapper[5070]: I1213 03:59:09.756193 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/62c6b21d-3500-4f16-b958-7a59dd7a7fda-inventory\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tn2zd\" (UID: \"62c6b21d-3500-4f16-b958-7a59dd7a7fda\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tn2zd" Dec 13 03:59:09 crc kubenswrapper[5070]: I1213 03:59:09.757934 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/62c6b21d-3500-4f16-b958-7a59dd7a7fda-nova-cell1-compute-config-1\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tn2zd\" (UID: \"62c6b21d-3500-4f16-b958-7a59dd7a7fda\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tn2zd" Dec 13 03:59:09 crc kubenswrapper[5070]: I1213 03:59:09.767293 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m5nlb\" (UniqueName: \"kubernetes.io/projected/62c6b21d-3500-4f16-b958-7a59dd7a7fda-kube-api-access-m5nlb\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tn2zd\" (UID: \"62c6b21d-3500-4f16-b958-7a59dd7a7fda\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tn2zd" Dec 13 03:59:09 crc kubenswrapper[5070]: I1213 03:59:09.946564 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tn2zd" Dec 13 03:59:10 crc kubenswrapper[5070]: I1213 03:59:10.487833 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tn2zd"] Dec 13 03:59:10 crc kubenswrapper[5070]: W1213 03:59:10.495433 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod62c6b21d_3500_4f16_b958_7a59dd7a7fda.slice/crio-188512ebb3ed6de427eaaf4e5203aeb5487784df2b47176f37656f46e974ccef WatchSource:0}: Error finding container 188512ebb3ed6de427eaaf4e5203aeb5487784df2b47176f37656f46e974ccef: Status 404 returned error can't find the container with id 188512ebb3ed6de427eaaf4e5203aeb5487784df2b47176f37656f46e974ccef Dec 13 03:59:10 crc kubenswrapper[5070]: I1213 03:59:10.532117 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tn2zd" event={"ID":"62c6b21d-3500-4f16-b958-7a59dd7a7fda","Type":"ContainerStarted","Data":"188512ebb3ed6de427eaaf4e5203aeb5487784df2b47176f37656f46e974ccef"} Dec 13 03:59:12 crc kubenswrapper[5070]: I1213 03:59:12.557266 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tn2zd" event={"ID":"62c6b21d-3500-4f16-b958-7a59dd7a7fda","Type":"ContainerStarted","Data":"de01bda092b4bb3b1fe593360fbb02a48c9fb604dd3278c5985b026fcb12b5e0"} Dec 13 03:59:51 crc kubenswrapper[5070]: I1213 03:59:51.943129 5070 patch_prober.go:28] interesting pod/machine-config-daemon-9l4rb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 13 03:59:51 crc kubenswrapper[5070]: I1213 03:59:51.943707 5070 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 13 04:00:00 crc kubenswrapper[5070]: I1213 04:00:00.154012 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tn2zd" podStartSLOduration=50.131428847 podStartE2EDuration="51.153982705s" podCreationTimestamp="2025-12-13 03:59:09 +0000 UTC" firstStartedPulling="2025-12-13 03:59:10.498716275 +0000 UTC m=+2842.734559831" lastFinishedPulling="2025-12-13 03:59:11.521270153 +0000 UTC m=+2843.757113689" observedRunningTime="2025-12-13 03:59:12.573392578 +0000 UTC m=+2844.809236124" watchObservedRunningTime="2025-12-13 04:00:00.153982705 +0000 UTC m=+2892.389826281" Dec 13 04:00:00 crc kubenswrapper[5070]: I1213 04:00:00.165095 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29426640-wjr6p"] Dec 13 04:00:00 crc kubenswrapper[5070]: I1213 04:00:00.167374 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29426640-wjr6p" Dec 13 04:00:00 crc kubenswrapper[5070]: I1213 04:00:00.170307 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 13 04:00:00 crc kubenswrapper[5070]: I1213 04:00:00.170839 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 13 04:00:00 crc kubenswrapper[5070]: I1213 04:00:00.187524 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29426640-wjr6p"] Dec 13 04:00:00 crc kubenswrapper[5070]: I1213 04:00:00.323112 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cdk58\" (UniqueName: \"kubernetes.io/projected/83d41711-02f5-4543-909b-75db2a8e7857-kube-api-access-cdk58\") pod \"collect-profiles-29426640-wjr6p\" (UID: \"83d41711-02f5-4543-909b-75db2a8e7857\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426640-wjr6p" Dec 13 04:00:00 crc kubenswrapper[5070]: I1213 04:00:00.323435 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/83d41711-02f5-4543-909b-75db2a8e7857-secret-volume\") pod \"collect-profiles-29426640-wjr6p\" (UID: \"83d41711-02f5-4543-909b-75db2a8e7857\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426640-wjr6p" Dec 13 04:00:00 crc kubenswrapper[5070]: I1213 04:00:00.323495 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/83d41711-02f5-4543-909b-75db2a8e7857-config-volume\") pod \"collect-profiles-29426640-wjr6p\" (UID: \"83d41711-02f5-4543-909b-75db2a8e7857\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426640-wjr6p" Dec 13 04:00:00 crc kubenswrapper[5070]: I1213 04:00:00.425359 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/83d41711-02f5-4543-909b-75db2a8e7857-secret-volume\") pod \"collect-profiles-29426640-wjr6p\" (UID: \"83d41711-02f5-4543-909b-75db2a8e7857\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426640-wjr6p" Dec 13 04:00:00 crc kubenswrapper[5070]: I1213 04:00:00.425979 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/83d41711-02f5-4543-909b-75db2a8e7857-config-volume\") pod \"collect-profiles-29426640-wjr6p\" (UID: \"83d41711-02f5-4543-909b-75db2a8e7857\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426640-wjr6p" Dec 13 04:00:00 crc kubenswrapper[5070]: I1213 04:00:00.426382 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cdk58\" (UniqueName: \"kubernetes.io/projected/83d41711-02f5-4543-909b-75db2a8e7857-kube-api-access-cdk58\") pod \"collect-profiles-29426640-wjr6p\" (UID: \"83d41711-02f5-4543-909b-75db2a8e7857\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426640-wjr6p" Dec 13 04:00:00 crc kubenswrapper[5070]: I1213 04:00:00.426929 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/83d41711-02f5-4543-909b-75db2a8e7857-config-volume\") pod 
\"collect-profiles-29426640-wjr6p\" (UID: \"83d41711-02f5-4543-909b-75db2a8e7857\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426640-wjr6p" Dec 13 04:00:00 crc kubenswrapper[5070]: I1213 04:00:00.434006 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/83d41711-02f5-4543-909b-75db2a8e7857-secret-volume\") pod \"collect-profiles-29426640-wjr6p\" (UID: \"83d41711-02f5-4543-909b-75db2a8e7857\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426640-wjr6p" Dec 13 04:00:00 crc kubenswrapper[5070]: I1213 04:00:00.452620 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cdk58\" (UniqueName: \"kubernetes.io/projected/83d41711-02f5-4543-909b-75db2a8e7857-kube-api-access-cdk58\") pod \"collect-profiles-29426640-wjr6p\" (UID: \"83d41711-02f5-4543-909b-75db2a8e7857\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426640-wjr6p" Dec 13 04:00:00 crc kubenswrapper[5070]: I1213 04:00:00.546566 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29426640-wjr6p" Dec 13 04:00:00 crc kubenswrapper[5070]: I1213 04:00:00.993934 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29426640-wjr6p"] Dec 13 04:00:01 crc kubenswrapper[5070]: I1213 04:00:01.003369 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29426640-wjr6p" event={"ID":"83d41711-02f5-4543-909b-75db2a8e7857","Type":"ContainerStarted","Data":"b57a015cb893b8fd3b8c4378e257dac963b07760aa2b19a200c115df4d3d093e"} Dec 13 04:00:02 crc kubenswrapper[5070]: I1213 04:00:02.013430 5070 generic.go:334] "Generic (PLEG): container finished" podID="83d41711-02f5-4543-909b-75db2a8e7857" containerID="4ef6548a2713ab68eeb2153ecfbf89fc53fa1d49af66d3c300204345cc84b784" exitCode=0 Dec 13 04:00:02 crc kubenswrapper[5070]: I1213 04:00:02.013489 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29426640-wjr6p" event={"ID":"83d41711-02f5-4543-909b-75db2a8e7857","Type":"ContainerDied","Data":"4ef6548a2713ab68eeb2153ecfbf89fc53fa1d49af66d3c300204345cc84b784"} Dec 13 04:00:03 crc kubenswrapper[5070]: I1213 04:00:03.373027 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29426640-wjr6p" Dec 13 04:00:03 crc kubenswrapper[5070]: I1213 04:00:03.491428 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cdk58\" (UniqueName: \"kubernetes.io/projected/83d41711-02f5-4543-909b-75db2a8e7857-kube-api-access-cdk58\") pod \"83d41711-02f5-4543-909b-75db2a8e7857\" (UID: \"83d41711-02f5-4543-909b-75db2a8e7857\") " Dec 13 04:00:03 crc kubenswrapper[5070]: I1213 04:00:03.491968 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/83d41711-02f5-4543-909b-75db2a8e7857-secret-volume\") pod \"83d41711-02f5-4543-909b-75db2a8e7857\" (UID: \"83d41711-02f5-4543-909b-75db2a8e7857\") " Dec 13 04:00:03 crc kubenswrapper[5070]: I1213 04:00:03.492359 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/83d41711-02f5-4543-909b-75db2a8e7857-config-volume\") pod \"83d41711-02f5-4543-909b-75db2a8e7857\" (UID: \"83d41711-02f5-4543-909b-75db2a8e7857\") " Dec 13 04:00:03 crc kubenswrapper[5070]: I1213 04:00:03.493107 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/83d41711-02f5-4543-909b-75db2a8e7857-config-volume" (OuterVolumeSpecName: "config-volume") pod "83d41711-02f5-4543-909b-75db2a8e7857" (UID: "83d41711-02f5-4543-909b-75db2a8e7857"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 04:00:03 crc kubenswrapper[5070]: I1213 04:00:03.494918 5070 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/83d41711-02f5-4543-909b-75db2a8e7857-config-volume\") on node \"crc\" DevicePath \"\"" Dec 13 04:00:03 crc kubenswrapper[5070]: I1213 04:00:03.686151 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/83d41711-02f5-4543-909b-75db2a8e7857-kube-api-access-cdk58" (OuterVolumeSpecName: "kube-api-access-cdk58") pod "83d41711-02f5-4543-909b-75db2a8e7857" (UID: "83d41711-02f5-4543-909b-75db2a8e7857"). InnerVolumeSpecName "kube-api-access-cdk58". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 04:00:03 crc kubenswrapper[5070]: I1213 04:00:03.688892 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/83d41711-02f5-4543-909b-75db2a8e7857-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "83d41711-02f5-4543-909b-75db2a8e7857" (UID: "83d41711-02f5-4543-909b-75db2a8e7857"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 04:00:03 crc kubenswrapper[5070]: I1213 04:00:03.698545 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cdk58\" (UniqueName: \"kubernetes.io/projected/83d41711-02f5-4543-909b-75db2a8e7857-kube-api-access-cdk58\") on node \"crc\" DevicePath \"\"" Dec 13 04:00:03 crc kubenswrapper[5070]: I1213 04:00:03.698590 5070 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/83d41711-02f5-4543-909b-75db2a8e7857-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 13 04:00:04 crc kubenswrapper[5070]: I1213 04:00:04.037265 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29426640-wjr6p" event={"ID":"83d41711-02f5-4543-909b-75db2a8e7857","Type":"ContainerDied","Data":"b57a015cb893b8fd3b8c4378e257dac963b07760aa2b19a200c115df4d3d093e"} Dec 13 04:00:04 crc kubenswrapper[5070]: I1213 04:00:04.037311 5070 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b57a015cb893b8fd3b8c4378e257dac963b07760aa2b19a200c115df4d3d093e" Dec 13 04:00:04 crc kubenswrapper[5070]: I1213 04:00:04.037313 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29426640-wjr6p" Dec 13 04:00:04 crc kubenswrapper[5070]: I1213 04:00:04.447740 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29426595-c87m7"] Dec 13 04:00:04 crc kubenswrapper[5070]: I1213 04:00:04.458858 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29426595-c87m7"] Dec 13 04:00:06 crc kubenswrapper[5070]: I1213 04:00:06.182072 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22cbbb70-41e0-476b-9640-48c5c997cf72" path="/var/lib/kubelet/pods/22cbbb70-41e0-476b-9640-48c5c997cf72/volumes" Dec 13 04:00:21 crc kubenswrapper[5070]: I1213 04:00:21.942632 5070 patch_prober.go:28] interesting pod/machine-config-daemon-9l4rb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 13 04:00:21 crc kubenswrapper[5070]: I1213 04:00:21.943055 5070 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 13 04:00:26 crc kubenswrapper[5070]: I1213 04:00:26.541830 5070 scope.go:117] "RemoveContainer" containerID="66724e9d54ec859a3275ff82318d31d12435ce09922594453e332eaa87b9f4da" Dec 13 04:00:51 crc kubenswrapper[5070]: I1213 04:00:51.942541 5070 patch_prober.go:28] interesting pod/machine-config-daemon-9l4rb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 13 04:00:51 crc kubenswrapper[5070]: I1213 04:00:51.943079 5070 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" 
containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 13 04:00:51 crc kubenswrapper[5070]: I1213 04:00:51.943160 5070 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" Dec 13 04:00:51 crc kubenswrapper[5070]: I1213 04:00:51.943943 5070 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"9987099f8e98d31d38862264f7e07bca0970527ae353fb5b7e9a8135e3aed8d3"} pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 13 04:00:51 crc kubenswrapper[5070]: I1213 04:00:51.943995 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" containerName="machine-config-daemon" containerID="cri-o://9987099f8e98d31d38862264f7e07bca0970527ae353fb5b7e9a8135e3aed8d3" gracePeriod=600 Dec 13 04:00:52 crc kubenswrapper[5070]: I1213 04:00:52.499840 5070 generic.go:334] "Generic (PLEG): container finished" podID="a2e447c7-5901-414f-af96-69441d4750db" containerID="9987099f8e98d31d38862264f7e07bca0970527ae353fb5b7e9a8135e3aed8d3" exitCode=0 Dec 13 04:00:52 crc kubenswrapper[5070]: I1213 04:00:52.499922 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" event={"ID":"a2e447c7-5901-414f-af96-69441d4750db","Type":"ContainerDied","Data":"9987099f8e98d31d38862264f7e07bca0970527ae353fb5b7e9a8135e3aed8d3"} Dec 13 04:00:52 crc kubenswrapper[5070]: I1213 04:00:52.500294 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" event={"ID":"a2e447c7-5901-414f-af96-69441d4750db","Type":"ContainerStarted","Data":"9efe948923eb00e6c9ce648b3696834a4b2264ea119eca5d1fc7e464ff7d2298"} Dec 13 04:00:52 crc kubenswrapper[5070]: I1213 04:00:52.500311 5070 scope.go:117] "RemoveContainer" containerID="56c86f8149d0aae997cc460a6956ca1f0ca99356e791ef266d0035921c2b2a17" Dec 13 04:01:00 crc kubenswrapper[5070]: I1213 04:01:00.155034 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-cron-29426641-b6wg6"] Dec 13 04:01:00 crc kubenswrapper[5070]: E1213 04:01:00.156018 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="83d41711-02f5-4543-909b-75db2a8e7857" containerName="collect-profiles" Dec 13 04:01:00 crc kubenswrapper[5070]: I1213 04:01:00.156034 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="83d41711-02f5-4543-909b-75db2a8e7857" containerName="collect-profiles" Dec 13 04:01:00 crc kubenswrapper[5070]: I1213 04:01:00.156225 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="83d41711-02f5-4543-909b-75db2a8e7857" containerName="collect-profiles" Dec 13 04:01:00 crc kubenswrapper[5070]: I1213 04:01:00.157549 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29426641-b6wg6" Dec 13 04:01:00 crc kubenswrapper[5070]: I1213 04:01:00.181613 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29426641-b6wg6"] Dec 13 04:01:00 crc kubenswrapper[5070]: I1213 04:01:00.326931 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3b5bc78b-9491-4608-ba14-e198834b091a-fernet-keys\") pod \"keystone-cron-29426641-b6wg6\" (UID: \"3b5bc78b-9491-4608-ba14-e198834b091a\") " pod="openstack/keystone-cron-29426641-b6wg6" Dec 13 04:01:00 crc kubenswrapper[5070]: I1213 04:01:00.327041 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3b5bc78b-9491-4608-ba14-e198834b091a-config-data\") pod \"keystone-cron-29426641-b6wg6\" (UID: \"3b5bc78b-9491-4608-ba14-e198834b091a\") " pod="openstack/keystone-cron-29426641-b6wg6" Dec 13 04:01:00 crc kubenswrapper[5070]: I1213 04:01:00.327142 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b5bc78b-9491-4608-ba14-e198834b091a-combined-ca-bundle\") pod \"keystone-cron-29426641-b6wg6\" (UID: \"3b5bc78b-9491-4608-ba14-e198834b091a\") " pod="openstack/keystone-cron-29426641-b6wg6" Dec 13 04:01:00 crc kubenswrapper[5070]: I1213 04:01:00.327226 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-22nzr\" (UniqueName: \"kubernetes.io/projected/3b5bc78b-9491-4608-ba14-e198834b091a-kube-api-access-22nzr\") pod \"keystone-cron-29426641-b6wg6\" (UID: \"3b5bc78b-9491-4608-ba14-e198834b091a\") " pod="openstack/keystone-cron-29426641-b6wg6" Dec 13 04:01:00 crc kubenswrapper[5070]: I1213 04:01:00.428595 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3b5bc78b-9491-4608-ba14-e198834b091a-fernet-keys\") pod \"keystone-cron-29426641-b6wg6\" (UID: \"3b5bc78b-9491-4608-ba14-e198834b091a\") " pod="openstack/keystone-cron-29426641-b6wg6" Dec 13 04:01:00 crc kubenswrapper[5070]: I1213 04:01:00.428772 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3b5bc78b-9491-4608-ba14-e198834b091a-config-data\") pod \"keystone-cron-29426641-b6wg6\" (UID: \"3b5bc78b-9491-4608-ba14-e198834b091a\") " pod="openstack/keystone-cron-29426641-b6wg6" Dec 13 04:01:00 crc kubenswrapper[5070]: I1213 04:01:00.428841 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b5bc78b-9491-4608-ba14-e198834b091a-combined-ca-bundle\") pod \"keystone-cron-29426641-b6wg6\" (UID: \"3b5bc78b-9491-4608-ba14-e198834b091a\") " pod="openstack/keystone-cron-29426641-b6wg6" Dec 13 04:01:00 crc kubenswrapper[5070]: I1213 04:01:00.428924 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-22nzr\" (UniqueName: \"kubernetes.io/projected/3b5bc78b-9491-4608-ba14-e198834b091a-kube-api-access-22nzr\") pod \"keystone-cron-29426641-b6wg6\" (UID: \"3b5bc78b-9491-4608-ba14-e198834b091a\") " pod="openstack/keystone-cron-29426641-b6wg6" Dec 13 04:01:00 crc kubenswrapper[5070]: I1213 04:01:00.435189 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3b5bc78b-9491-4608-ba14-e198834b091a-config-data\") pod \"keystone-cron-29426641-b6wg6\" (UID: \"3b5bc78b-9491-4608-ba14-e198834b091a\") " pod="openstack/keystone-cron-29426641-b6wg6" Dec 13 04:01:00 crc kubenswrapper[5070]: I1213 04:01:00.435618 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3b5bc78b-9491-4608-ba14-e198834b091a-fernet-keys\") pod \"keystone-cron-29426641-b6wg6\" (UID: \"3b5bc78b-9491-4608-ba14-e198834b091a\") " pod="openstack/keystone-cron-29426641-b6wg6" Dec 13 04:01:00 crc kubenswrapper[5070]: I1213 04:01:00.435942 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b5bc78b-9491-4608-ba14-e198834b091a-combined-ca-bundle\") pod \"keystone-cron-29426641-b6wg6\" (UID: \"3b5bc78b-9491-4608-ba14-e198834b091a\") " pod="openstack/keystone-cron-29426641-b6wg6" Dec 13 04:01:00 crc kubenswrapper[5070]: I1213 04:01:00.466711 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-22nzr\" (UniqueName: \"kubernetes.io/projected/3b5bc78b-9491-4608-ba14-e198834b091a-kube-api-access-22nzr\") pod \"keystone-cron-29426641-b6wg6\" (UID: \"3b5bc78b-9491-4608-ba14-e198834b091a\") " pod="openstack/keystone-cron-29426641-b6wg6" Dec 13 04:01:00 crc kubenswrapper[5070]: I1213 04:01:00.482970 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29426641-b6wg6" Dec 13 04:01:00 crc kubenswrapper[5070]: I1213 04:01:00.933989 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29426641-b6wg6"] Dec 13 04:01:01 crc kubenswrapper[5070]: I1213 04:01:01.578491 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29426641-b6wg6" event={"ID":"3b5bc78b-9491-4608-ba14-e198834b091a","Type":"ContainerStarted","Data":"368f01514c40d9954737c67c3e0c1b853daf5c07ca27c0ba010eac5f23943710"} Dec 13 04:01:01 crc kubenswrapper[5070]: I1213 04:01:01.579915 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29426641-b6wg6" event={"ID":"3b5bc78b-9491-4608-ba14-e198834b091a","Type":"ContainerStarted","Data":"67988a2e63bce816b4fbccbe2ab64668c3c1c11dac4afa3f506715c54978570e"} Dec 13 04:01:01 crc kubenswrapper[5070]: I1213 04:01:01.609575 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-cron-29426641-b6wg6" podStartSLOduration=1.6095469470000001 podStartE2EDuration="1.609546947s" podCreationTimestamp="2025-12-13 04:01:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 04:01:01.601241991 +0000 UTC m=+2953.837085537" watchObservedRunningTime="2025-12-13 04:01:01.609546947 +0000 UTC m=+2953.845390493" Dec 13 04:01:03 crc kubenswrapper[5070]: I1213 04:01:03.594283 5070 generic.go:334] "Generic (PLEG): container finished" podID="3b5bc78b-9491-4608-ba14-e198834b091a" containerID="368f01514c40d9954737c67c3e0c1b853daf5c07ca27c0ba010eac5f23943710" exitCode=0 Dec 13 04:01:03 crc kubenswrapper[5070]: I1213 04:01:03.594397 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29426641-b6wg6" event={"ID":"3b5bc78b-9491-4608-ba14-e198834b091a","Type":"ContainerDied","Data":"368f01514c40d9954737c67c3e0c1b853daf5c07ca27c0ba010eac5f23943710"} Dec 13 04:01:04 crc 
kubenswrapper[5070]: I1213 04:01:04.921090 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29426641-b6wg6" Dec 13 04:01:05 crc kubenswrapper[5070]: I1213 04:01:05.006680 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-22nzr\" (UniqueName: \"kubernetes.io/projected/3b5bc78b-9491-4608-ba14-e198834b091a-kube-api-access-22nzr\") pod \"3b5bc78b-9491-4608-ba14-e198834b091a\" (UID: \"3b5bc78b-9491-4608-ba14-e198834b091a\") " Dec 13 04:01:05 crc kubenswrapper[5070]: I1213 04:01:05.006762 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3b5bc78b-9491-4608-ba14-e198834b091a-fernet-keys\") pod \"3b5bc78b-9491-4608-ba14-e198834b091a\" (UID: \"3b5bc78b-9491-4608-ba14-e198834b091a\") " Dec 13 04:01:05 crc kubenswrapper[5070]: I1213 04:01:05.006950 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b5bc78b-9491-4608-ba14-e198834b091a-combined-ca-bundle\") pod \"3b5bc78b-9491-4608-ba14-e198834b091a\" (UID: \"3b5bc78b-9491-4608-ba14-e198834b091a\") " Dec 13 04:01:05 crc kubenswrapper[5070]: I1213 04:01:05.007001 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3b5bc78b-9491-4608-ba14-e198834b091a-config-data\") pod \"3b5bc78b-9491-4608-ba14-e198834b091a\" (UID: \"3b5bc78b-9491-4608-ba14-e198834b091a\") " Dec 13 04:01:05 crc kubenswrapper[5070]: I1213 04:01:05.012773 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3b5bc78b-9491-4608-ba14-e198834b091a-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "3b5bc78b-9491-4608-ba14-e198834b091a" (UID: "3b5bc78b-9491-4608-ba14-e198834b091a"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 04:01:05 crc kubenswrapper[5070]: I1213 04:01:05.016561 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3b5bc78b-9491-4608-ba14-e198834b091a-kube-api-access-22nzr" (OuterVolumeSpecName: "kube-api-access-22nzr") pod "3b5bc78b-9491-4608-ba14-e198834b091a" (UID: "3b5bc78b-9491-4608-ba14-e198834b091a"). InnerVolumeSpecName "kube-api-access-22nzr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 04:01:05 crc kubenswrapper[5070]: I1213 04:01:05.035559 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3b5bc78b-9491-4608-ba14-e198834b091a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3b5bc78b-9491-4608-ba14-e198834b091a" (UID: "3b5bc78b-9491-4608-ba14-e198834b091a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 04:01:05 crc kubenswrapper[5070]: I1213 04:01:05.066264 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3b5bc78b-9491-4608-ba14-e198834b091a-config-data" (OuterVolumeSpecName: "config-data") pod "3b5bc78b-9491-4608-ba14-e198834b091a" (UID: "3b5bc78b-9491-4608-ba14-e198834b091a"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 04:01:05 crc kubenswrapper[5070]: I1213 04:01:05.108972 5070 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3b5bc78b-9491-4608-ba14-e198834b091a-fernet-keys\") on node \"crc\" DevicePath \"\"" Dec 13 04:01:05 crc kubenswrapper[5070]: I1213 04:01:05.109006 5070 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b5bc78b-9491-4608-ba14-e198834b091a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 04:01:05 crc kubenswrapper[5070]: I1213 04:01:05.109017 5070 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3b5bc78b-9491-4608-ba14-e198834b091a-config-data\") on node \"crc\" DevicePath \"\"" Dec 13 04:01:05 crc kubenswrapper[5070]: I1213 04:01:05.109026 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-22nzr\" (UniqueName: \"kubernetes.io/projected/3b5bc78b-9491-4608-ba14-e198834b091a-kube-api-access-22nzr\") on node \"crc\" DevicePath \"\"" Dec 13 04:01:05 crc kubenswrapper[5070]: I1213 04:01:05.617761 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29426641-b6wg6" event={"ID":"3b5bc78b-9491-4608-ba14-e198834b091a","Type":"ContainerDied","Data":"67988a2e63bce816b4fbccbe2ab64668c3c1c11dac4afa3f506715c54978570e"} Dec 13 04:01:05 crc kubenswrapper[5070]: I1213 04:01:05.618114 5070 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="67988a2e63bce816b4fbccbe2ab64668c3c1c11dac4afa3f506715c54978570e" Dec 13 04:01:05 crc kubenswrapper[5070]: I1213 04:01:05.617934 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29426641-b6wg6" Dec 13 04:02:02 crc kubenswrapper[5070]: I1213 04:02:02.156162 5070 generic.go:334] "Generic (PLEG): container finished" podID="62c6b21d-3500-4f16-b958-7a59dd7a7fda" containerID="de01bda092b4bb3b1fe593360fbb02a48c9fb604dd3278c5985b026fcb12b5e0" exitCode=0 Dec 13 04:02:02 crc kubenswrapper[5070]: I1213 04:02:02.156263 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tn2zd" event={"ID":"62c6b21d-3500-4f16-b958-7a59dd7a7fda","Type":"ContainerDied","Data":"de01bda092b4bb3b1fe593360fbb02a48c9fb604dd3278c5985b026fcb12b5e0"} Dec 13 04:02:03 crc kubenswrapper[5070]: I1213 04:02:03.517010 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tn2zd" Dec 13 04:02:03 crc kubenswrapper[5070]: I1213 04:02:03.613499 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/62c6b21d-3500-4f16-b958-7a59dd7a7fda-nova-cell1-compute-config-1\") pod \"62c6b21d-3500-4f16-b958-7a59dd7a7fda\" (UID: \"62c6b21d-3500-4f16-b958-7a59dd7a7fda\") " Dec 13 04:02:03 crc kubenswrapper[5070]: I1213 04:02:03.613579 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/62c6b21d-3500-4f16-b958-7a59dd7a7fda-nova-extra-config-0\") pod \"62c6b21d-3500-4f16-b958-7a59dd7a7fda\" (UID: \"62c6b21d-3500-4f16-b958-7a59dd7a7fda\") " Dec 13 04:02:03 crc kubenswrapper[5070]: I1213 04:02:03.613631 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-custom-ceph-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/62c6b21d-3500-4f16-b958-7a59dd7a7fda-nova-custom-ceph-combined-ca-bundle\") pod \"62c6b21d-3500-4f16-b958-7a59dd7a7fda\" (UID: \"62c6b21d-3500-4f16-b958-7a59dd7a7fda\") " Dec 13 04:02:03 crc kubenswrapper[5070]: I1213 04:02:03.613773 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m5nlb\" (UniqueName: \"kubernetes.io/projected/62c6b21d-3500-4f16-b958-7a59dd7a7fda-kube-api-access-m5nlb\") pod \"62c6b21d-3500-4f16-b958-7a59dd7a7fda\" (UID: \"62c6b21d-3500-4f16-b958-7a59dd7a7fda\") " Dec 13 04:02:03 crc kubenswrapper[5070]: I1213 04:02:03.613812 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/62c6b21d-3500-4f16-b958-7a59dd7a7fda-ceph\") pod \"62c6b21d-3500-4f16-b958-7a59dd7a7fda\" (UID: \"62c6b21d-3500-4f16-b958-7a59dd7a7fda\") " Dec 13 04:02:03 crc kubenswrapper[5070]: I1213 04:02:03.613887 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/62c6b21d-3500-4f16-b958-7a59dd7a7fda-nova-cell1-compute-config-0\") pod \"62c6b21d-3500-4f16-b958-7a59dd7a7fda\" (UID: \"62c6b21d-3500-4f16-b958-7a59dd7a7fda\") " Dec 13 04:02:03 crc kubenswrapper[5070]: I1213 04:02:03.613960 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/62c6b21d-3500-4f16-b958-7a59dd7a7fda-nova-migration-ssh-key-1\") pod \"62c6b21d-3500-4f16-b958-7a59dd7a7fda\" (UID: \"62c6b21d-3500-4f16-b958-7a59dd7a7fda\") " Dec 13 04:02:03 crc kubenswrapper[5070]: I1213 04:02:03.614102 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/62c6b21d-3500-4f16-b958-7a59dd7a7fda-inventory\") pod \"62c6b21d-3500-4f16-b958-7a59dd7a7fda\" (UID: \"62c6b21d-3500-4f16-b958-7a59dd7a7fda\") " Dec 13 04:02:03 crc kubenswrapper[5070]: I1213 04:02:03.614169 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph-nova-0\" (UniqueName: \"kubernetes.io/configmap/62c6b21d-3500-4f16-b958-7a59dd7a7fda-ceph-nova-0\") pod \"62c6b21d-3500-4f16-b958-7a59dd7a7fda\" (UID: \"62c6b21d-3500-4f16-b958-7a59dd7a7fda\") " Dec 13 04:02:03 crc kubenswrapper[5070]: I1213 04:02:03.614219 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-0\" 
(UniqueName: \"kubernetes.io/secret/62c6b21d-3500-4f16-b958-7a59dd7a7fda-nova-migration-ssh-key-0\") pod \"62c6b21d-3500-4f16-b958-7a59dd7a7fda\" (UID: \"62c6b21d-3500-4f16-b958-7a59dd7a7fda\") " Dec 13 04:02:03 crc kubenswrapper[5070]: I1213 04:02:03.614256 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/62c6b21d-3500-4f16-b958-7a59dd7a7fda-ssh-key\") pod \"62c6b21d-3500-4f16-b958-7a59dd7a7fda\" (UID: \"62c6b21d-3500-4f16-b958-7a59dd7a7fda\") " Dec 13 04:02:03 crc kubenswrapper[5070]: I1213 04:02:03.620856 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/62c6b21d-3500-4f16-b958-7a59dd7a7fda-ceph" (OuterVolumeSpecName: "ceph") pod "62c6b21d-3500-4f16-b958-7a59dd7a7fda" (UID: "62c6b21d-3500-4f16-b958-7a59dd7a7fda"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 04:02:03 crc kubenswrapper[5070]: I1213 04:02:03.620925 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/62c6b21d-3500-4f16-b958-7a59dd7a7fda-nova-custom-ceph-combined-ca-bundle" (OuterVolumeSpecName: "nova-custom-ceph-combined-ca-bundle") pod "62c6b21d-3500-4f16-b958-7a59dd7a7fda" (UID: "62c6b21d-3500-4f16-b958-7a59dd7a7fda"). InnerVolumeSpecName "nova-custom-ceph-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 04:02:03 crc kubenswrapper[5070]: I1213 04:02:03.621784 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/62c6b21d-3500-4f16-b958-7a59dd7a7fda-kube-api-access-m5nlb" (OuterVolumeSpecName: "kube-api-access-m5nlb") pod "62c6b21d-3500-4f16-b958-7a59dd7a7fda" (UID: "62c6b21d-3500-4f16-b958-7a59dd7a7fda"). InnerVolumeSpecName "kube-api-access-m5nlb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 04:02:03 crc kubenswrapper[5070]: I1213 04:02:03.648435 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/62c6b21d-3500-4f16-b958-7a59dd7a7fda-ceph-nova-0" (OuterVolumeSpecName: "ceph-nova-0") pod "62c6b21d-3500-4f16-b958-7a59dd7a7fda" (UID: "62c6b21d-3500-4f16-b958-7a59dd7a7fda"). InnerVolumeSpecName "ceph-nova-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 04:02:03 crc kubenswrapper[5070]: I1213 04:02:03.657991 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/62c6b21d-3500-4f16-b958-7a59dd7a7fda-nova-extra-config-0" (OuterVolumeSpecName: "nova-extra-config-0") pod "62c6b21d-3500-4f16-b958-7a59dd7a7fda" (UID: "62c6b21d-3500-4f16-b958-7a59dd7a7fda"). InnerVolumeSpecName "nova-extra-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 04:02:03 crc kubenswrapper[5070]: I1213 04:02:03.662796 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/62c6b21d-3500-4f16-b958-7a59dd7a7fda-nova-migration-ssh-key-1" (OuterVolumeSpecName: "nova-migration-ssh-key-1") pod "62c6b21d-3500-4f16-b958-7a59dd7a7fda" (UID: "62c6b21d-3500-4f16-b958-7a59dd7a7fda"). InnerVolumeSpecName "nova-migration-ssh-key-1". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 04:02:03 crc kubenswrapper[5070]: I1213 04:02:03.663532 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/62c6b21d-3500-4f16-b958-7a59dd7a7fda-nova-migration-ssh-key-0" (OuterVolumeSpecName: "nova-migration-ssh-key-0") pod "62c6b21d-3500-4f16-b958-7a59dd7a7fda" (UID: "62c6b21d-3500-4f16-b958-7a59dd7a7fda"). InnerVolumeSpecName "nova-migration-ssh-key-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 04:02:03 crc kubenswrapper[5070]: I1213 04:02:03.663870 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/62c6b21d-3500-4f16-b958-7a59dd7a7fda-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "62c6b21d-3500-4f16-b958-7a59dd7a7fda" (UID: "62c6b21d-3500-4f16-b958-7a59dd7a7fda"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 04:02:03 crc kubenswrapper[5070]: I1213 04:02:03.671809 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/62c6b21d-3500-4f16-b958-7a59dd7a7fda-nova-cell1-compute-config-0" (OuterVolumeSpecName: "nova-cell1-compute-config-0") pod "62c6b21d-3500-4f16-b958-7a59dd7a7fda" (UID: "62c6b21d-3500-4f16-b958-7a59dd7a7fda"). InnerVolumeSpecName "nova-cell1-compute-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 04:02:03 crc kubenswrapper[5070]: I1213 04:02:03.671944 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/62c6b21d-3500-4f16-b958-7a59dd7a7fda-nova-cell1-compute-config-1" (OuterVolumeSpecName: "nova-cell1-compute-config-1") pod "62c6b21d-3500-4f16-b958-7a59dd7a7fda" (UID: "62c6b21d-3500-4f16-b958-7a59dd7a7fda"). InnerVolumeSpecName "nova-cell1-compute-config-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 04:02:03 crc kubenswrapper[5070]: I1213 04:02:03.687783 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/62c6b21d-3500-4f16-b958-7a59dd7a7fda-inventory" (OuterVolumeSpecName: "inventory") pod "62c6b21d-3500-4f16-b958-7a59dd7a7fda" (UID: "62c6b21d-3500-4f16-b958-7a59dd7a7fda"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 04:02:03 crc kubenswrapper[5070]: I1213 04:02:03.717146 5070 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/62c6b21d-3500-4f16-b958-7a59dd7a7fda-inventory\") on node \"crc\" DevicePath \"\"" Dec 13 04:02:03 crc kubenswrapper[5070]: I1213 04:02:03.717392 5070 reconciler_common.go:293] "Volume detached for volume \"ceph-nova-0\" (UniqueName: \"kubernetes.io/configmap/62c6b21d-3500-4f16-b958-7a59dd7a7fda-ceph-nova-0\") on node \"crc\" DevicePath \"\"" Dec 13 04:02:03 crc kubenswrapper[5070]: I1213 04:02:03.717571 5070 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/62c6b21d-3500-4f16-b958-7a59dd7a7fda-nova-migration-ssh-key-0\") on node \"crc\" DevicePath \"\"" Dec 13 04:02:03 crc kubenswrapper[5070]: I1213 04:02:03.717655 5070 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/62c6b21d-3500-4f16-b958-7a59dd7a7fda-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 13 04:02:03 crc kubenswrapper[5070]: I1213 04:02:03.717731 5070 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/62c6b21d-3500-4f16-b958-7a59dd7a7fda-nova-cell1-compute-config-1\") on node \"crc\" DevicePath \"\"" Dec 13 04:02:03 crc kubenswrapper[5070]: I1213 04:02:03.717805 5070 reconciler_common.go:293] "Volume detached for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/62c6b21d-3500-4f16-b958-7a59dd7a7fda-nova-extra-config-0\") on node \"crc\" DevicePath \"\"" Dec 13 04:02:03 crc kubenswrapper[5070]: I1213 04:02:03.717886 5070 reconciler_common.go:293] "Volume detached for volume \"nova-custom-ceph-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/62c6b21d-3500-4f16-b958-7a59dd7a7fda-nova-custom-ceph-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 04:02:03 crc kubenswrapper[5070]: I1213 04:02:03.717965 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m5nlb\" (UniqueName: \"kubernetes.io/projected/62c6b21d-3500-4f16-b958-7a59dd7a7fda-kube-api-access-m5nlb\") on node \"crc\" DevicePath \"\"" Dec 13 04:02:03 crc kubenswrapper[5070]: I1213 04:02:03.718050 5070 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/62c6b21d-3500-4f16-b958-7a59dd7a7fda-ceph\") on node \"crc\" DevicePath \"\"" Dec 13 04:02:03 crc kubenswrapper[5070]: I1213 04:02:03.718124 5070 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/62c6b21d-3500-4f16-b958-7a59dd7a7fda-nova-cell1-compute-config-0\") on node \"crc\" DevicePath \"\"" Dec 13 04:02:03 crc kubenswrapper[5070]: I1213 04:02:03.718213 5070 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/62c6b21d-3500-4f16-b958-7a59dd7a7fda-nova-migration-ssh-key-1\") on node \"crc\" DevicePath \"\"" Dec 13 04:02:04 crc kubenswrapper[5070]: I1213 04:02:04.172457 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tn2zd" Dec 13 04:02:04 crc kubenswrapper[5070]: I1213 04:02:04.179721 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tn2zd" event={"ID":"62c6b21d-3500-4f16-b958-7a59dd7a7fda","Type":"ContainerDied","Data":"188512ebb3ed6de427eaaf4e5203aeb5487784df2b47176f37656f46e974ccef"} Dec 13 04:02:04 crc kubenswrapper[5070]: I1213 04:02:04.179768 5070 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="188512ebb3ed6de427eaaf4e5203aeb5487784df2b47176f37656f46e974ccef" Dec 13 04:02:18 crc kubenswrapper[5070]: I1213 04:02:18.558733 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-volume-volume1-0"] Dec 13 04:02:18 crc kubenswrapper[5070]: E1213 04:02:18.559762 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3b5bc78b-9491-4608-ba14-e198834b091a" containerName="keystone-cron" Dec 13 04:02:18 crc kubenswrapper[5070]: I1213 04:02:18.559780 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="3b5bc78b-9491-4608-ba14-e198834b091a" containerName="keystone-cron" Dec 13 04:02:18 crc kubenswrapper[5070]: E1213 04:02:18.559805 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="62c6b21d-3500-4f16-b958-7a59dd7a7fda" containerName="nova-custom-ceph-edpm-deployment-openstack-edpm-ipam" Dec 13 04:02:18 crc kubenswrapper[5070]: I1213 04:02:18.559815 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="62c6b21d-3500-4f16-b958-7a59dd7a7fda" containerName="nova-custom-ceph-edpm-deployment-openstack-edpm-ipam" Dec 13 04:02:18 crc kubenswrapper[5070]: I1213 04:02:18.560017 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="62c6b21d-3500-4f16-b958-7a59dd7a7fda" containerName="nova-custom-ceph-edpm-deployment-openstack-edpm-ipam" Dec 13 04:02:18 crc kubenswrapper[5070]: I1213 04:02:18.560052 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="3b5bc78b-9491-4608-ba14-e198834b091a" containerName="keystone-cron" Dec 13 04:02:18 crc kubenswrapper[5070]: I1213 04:02:18.561182 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-volume-volume1-0" Dec 13 04:02:18 crc kubenswrapper[5070]: I1213 04:02:18.563279 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Dec 13 04:02:18 crc kubenswrapper[5070]: I1213 04:02:18.563776 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-volume-volume1-config-data" Dec 13 04:02:18 crc kubenswrapper[5070]: I1213 04:02:18.586416 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-backup-0"] Dec 13 04:02:18 crc kubenswrapper[5070]: I1213 04:02:18.587970 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-backup-0" Dec 13 04:02:18 crc kubenswrapper[5070]: I1213 04:02:18.590162 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-backup-config-data" Dec 13 04:02:18 crc kubenswrapper[5070]: I1213 04:02:18.595969 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/8fd14d72-1dc4-4ab9-8b92-27e740e7eada-etc-iscsi\") pod \"cinder-volume-volume1-0\" (UID: \"8fd14d72-1dc4-4ab9-8b92-27e740e7eada\") " pod="openstack/cinder-volume-volume1-0" Dec 13 04:02:18 crc kubenswrapper[5070]: I1213 04:02:18.596027 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/8fd14d72-1dc4-4ab9-8b92-27e740e7eada-var-lib-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"8fd14d72-1dc4-4ab9-8b92-27e740e7eada\") " pod="openstack/cinder-volume-volume1-0" Dec 13 04:02:18 crc kubenswrapper[5070]: I1213 04:02:18.596065 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6klxz\" (UniqueName: \"kubernetes.io/projected/8fd14d72-1dc4-4ab9-8b92-27e740e7eada-kube-api-access-6klxz\") pod \"cinder-volume-volume1-0\" (UID: \"8fd14d72-1dc4-4ab9-8b92-27e740e7eada\") " pod="openstack/cinder-volume-volume1-0" Dec 13 04:02:18 crc kubenswrapper[5070]: I1213 04:02:18.596139 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/8fd14d72-1dc4-4ab9-8b92-27e740e7eada-etc-nvme\") pod \"cinder-volume-volume1-0\" (UID: \"8fd14d72-1dc4-4ab9-8b92-27e740e7eada\") " pod="openstack/cinder-volume-volume1-0" Dec 13 04:02:18 crc kubenswrapper[5070]: I1213 04:02:18.596176 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/8fd14d72-1dc4-4ab9-8b92-27e740e7eada-sys\") pod \"cinder-volume-volume1-0\" (UID: \"8fd14d72-1dc4-4ab9-8b92-27e740e7eada\") " pod="openstack/cinder-volume-volume1-0" Dec 13 04:02:18 crc kubenswrapper[5070]: I1213 04:02:18.596202 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8fd14d72-1dc4-4ab9-8b92-27e740e7eada-config-data\") pod \"cinder-volume-volume1-0\" (UID: \"8fd14d72-1dc4-4ab9-8b92-27e740e7eada\") " pod="openstack/cinder-volume-volume1-0" Dec 13 04:02:18 crc kubenswrapper[5070]: I1213 04:02:18.596228 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/8fd14d72-1dc4-4ab9-8b92-27e740e7eada-var-locks-brick\") pod \"cinder-volume-volume1-0\" (UID: \"8fd14d72-1dc4-4ab9-8b92-27e740e7eada\") " pod="openstack/cinder-volume-volume1-0" Dec 13 04:02:18 crc kubenswrapper[5070]: I1213 04:02:18.596260 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8fd14d72-1dc4-4ab9-8b92-27e740e7eada-scripts\") pod \"cinder-volume-volume1-0\" (UID: \"8fd14d72-1dc4-4ab9-8b92-27e740e7eada\") " pod="openstack/cinder-volume-volume1-0" Dec 13 04:02:18 crc kubenswrapper[5070]: I1213 04:02:18.596285 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: 
\"kubernetes.io/host-path/8fd14d72-1dc4-4ab9-8b92-27e740e7eada-etc-machine-id\") pod \"cinder-volume-volume1-0\" (UID: \"8fd14d72-1dc4-4ab9-8b92-27e740e7eada\") " pod="openstack/cinder-volume-volume1-0" Dec 13 04:02:18 crc kubenswrapper[5070]: I1213 04:02:18.596337 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8fd14d72-1dc4-4ab9-8b92-27e740e7eada-config-data-custom\") pod \"cinder-volume-volume1-0\" (UID: \"8fd14d72-1dc4-4ab9-8b92-27e740e7eada\") " pod="openstack/cinder-volume-volume1-0" Dec 13 04:02:18 crc kubenswrapper[5070]: I1213 04:02:18.596364 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/8fd14d72-1dc4-4ab9-8b92-27e740e7eada-dev\") pod \"cinder-volume-volume1-0\" (UID: \"8fd14d72-1dc4-4ab9-8b92-27e740e7eada\") " pod="openstack/cinder-volume-volume1-0" Dec 13 04:02:18 crc kubenswrapper[5070]: I1213 04:02:18.596394 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8fd14d72-1dc4-4ab9-8b92-27e740e7eada-combined-ca-bundle\") pod \"cinder-volume-volume1-0\" (UID: \"8fd14d72-1dc4-4ab9-8b92-27e740e7eada\") " pod="openstack/cinder-volume-volume1-0" Dec 13 04:02:18 crc kubenswrapper[5070]: I1213 04:02:18.596410 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/8fd14d72-1dc4-4ab9-8b92-27e740e7eada-ceph\") pod \"cinder-volume-volume1-0\" (UID: \"8fd14d72-1dc4-4ab9-8b92-27e740e7eada\") " pod="openstack/cinder-volume-volume1-0" Dec 13 04:02:18 crc kubenswrapper[5070]: I1213 04:02:18.596428 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/8fd14d72-1dc4-4ab9-8b92-27e740e7eada-lib-modules\") pod \"cinder-volume-volume1-0\" (UID: \"8fd14d72-1dc4-4ab9-8b92-27e740e7eada\") " pod="openstack/cinder-volume-volume1-0" Dec 13 04:02:18 crc kubenswrapper[5070]: I1213 04:02:18.596465 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/8fd14d72-1dc4-4ab9-8b92-27e740e7eada-run\") pod \"cinder-volume-volume1-0\" (UID: \"8fd14d72-1dc4-4ab9-8b92-27e740e7eada\") " pod="openstack/cinder-volume-volume1-0" Dec 13 04:02:18 crc kubenswrapper[5070]: I1213 04:02:18.596481 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/8fd14d72-1dc4-4ab9-8b92-27e740e7eada-var-locks-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"8fd14d72-1dc4-4ab9-8b92-27e740e7eada\") " pod="openstack/cinder-volume-volume1-0" Dec 13 04:02:18 crc kubenswrapper[5070]: I1213 04:02:18.598790 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-volume-volume1-0"] Dec 13 04:02:18 crc kubenswrapper[5070]: I1213 04:02:18.623638 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-backup-0"] Dec 13 04:02:18 crc kubenswrapper[5070]: I1213 04:02:18.697742 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6klxz\" (UniqueName: \"kubernetes.io/projected/8fd14d72-1dc4-4ab9-8b92-27e740e7eada-kube-api-access-6klxz\") pod \"cinder-volume-volume1-0\" (UID: 
\"8fd14d72-1dc4-4ab9-8b92-27e740e7eada\") " pod="openstack/cinder-volume-volume1-0" Dec 13 04:02:18 crc kubenswrapper[5070]: I1213 04:02:18.697824 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/21e7cf18-aaa7-458d-b7fa-e1084b6d7e4b-sys\") pod \"cinder-backup-0\" (UID: \"21e7cf18-aaa7-458d-b7fa-e1084b6d7e4b\") " pod="openstack/cinder-backup-0" Dec 13 04:02:18 crc kubenswrapper[5070]: I1213 04:02:18.697853 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/21e7cf18-aaa7-458d-b7fa-e1084b6d7e4b-scripts\") pod \"cinder-backup-0\" (UID: \"21e7cf18-aaa7-458d-b7fa-e1084b6d7e4b\") " pod="openstack/cinder-backup-0" Dec 13 04:02:18 crc kubenswrapper[5070]: I1213 04:02:18.697886 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/21e7cf18-aaa7-458d-b7fa-e1084b6d7e4b-etc-iscsi\") pod \"cinder-backup-0\" (UID: \"21e7cf18-aaa7-458d-b7fa-e1084b6d7e4b\") " pod="openstack/cinder-backup-0" Dec 13 04:02:18 crc kubenswrapper[5070]: I1213 04:02:18.697928 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/8fd14d72-1dc4-4ab9-8b92-27e740e7eada-etc-nvme\") pod \"cinder-volume-volume1-0\" (UID: \"8fd14d72-1dc4-4ab9-8b92-27e740e7eada\") " pod="openstack/cinder-volume-volume1-0" Dec 13 04:02:18 crc kubenswrapper[5070]: I1213 04:02:18.697970 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/8fd14d72-1dc4-4ab9-8b92-27e740e7eada-sys\") pod \"cinder-volume-volume1-0\" (UID: \"8fd14d72-1dc4-4ab9-8b92-27e740e7eada\") " pod="openstack/cinder-volume-volume1-0" Dec 13 04:02:18 crc kubenswrapper[5070]: I1213 04:02:18.697996 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/21e7cf18-aaa7-458d-b7fa-e1084b6d7e4b-var-locks-cinder\") pod \"cinder-backup-0\" (UID: \"21e7cf18-aaa7-458d-b7fa-e1084b6d7e4b\") " pod="openstack/cinder-backup-0" Dec 13 04:02:18 crc kubenswrapper[5070]: I1213 04:02:18.698024 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8fd14d72-1dc4-4ab9-8b92-27e740e7eada-config-data\") pod \"cinder-volume-volume1-0\" (UID: \"8fd14d72-1dc4-4ab9-8b92-27e740e7eada\") " pod="openstack/cinder-volume-volume1-0" Dec 13 04:02:18 crc kubenswrapper[5070]: I1213 04:02:18.698056 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/8fd14d72-1dc4-4ab9-8b92-27e740e7eada-var-locks-brick\") pod \"cinder-volume-volume1-0\" (UID: \"8fd14d72-1dc4-4ab9-8b92-27e740e7eada\") " pod="openstack/cinder-volume-volume1-0" Dec 13 04:02:18 crc kubenswrapper[5070]: I1213 04:02:18.698084 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/21e7cf18-aaa7-458d-b7fa-e1084b6d7e4b-etc-nvme\") pod \"cinder-backup-0\" (UID: \"21e7cf18-aaa7-458d-b7fa-e1084b6d7e4b\") " pod="openstack/cinder-backup-0" Dec 13 04:02:18 crc kubenswrapper[5070]: I1213 04:02:18.698111 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8fd14d72-1dc4-4ab9-8b92-27e740e7eada-scripts\") pod \"cinder-volume-volume1-0\" (UID: \"8fd14d72-1dc4-4ab9-8b92-27e740e7eada\") " pod="openstack/cinder-volume-volume1-0" Dec 13 04:02:18 crc kubenswrapper[5070]: I1213 04:02:18.698132 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/21e7cf18-aaa7-458d-b7fa-e1084b6d7e4b-lib-modules\") pod \"cinder-backup-0\" (UID: \"21e7cf18-aaa7-458d-b7fa-e1084b6d7e4b\") " pod="openstack/cinder-backup-0" Dec 13 04:02:18 crc kubenswrapper[5070]: I1213 04:02:18.698156 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/8fd14d72-1dc4-4ab9-8b92-27e740e7eada-etc-machine-id\") pod \"cinder-volume-volume1-0\" (UID: \"8fd14d72-1dc4-4ab9-8b92-27e740e7eada\") " pod="openstack/cinder-volume-volume1-0" Dec 13 04:02:18 crc kubenswrapper[5070]: I1213 04:02:18.698184 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/21e7cf18-aaa7-458d-b7fa-e1084b6d7e4b-var-locks-brick\") pod \"cinder-backup-0\" (UID: \"21e7cf18-aaa7-458d-b7fa-e1084b6d7e4b\") " pod="openstack/cinder-backup-0" Dec 13 04:02:18 crc kubenswrapper[5070]: I1213 04:02:18.698214 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/21e7cf18-aaa7-458d-b7fa-e1084b6d7e4b-ceph\") pod \"cinder-backup-0\" (UID: \"21e7cf18-aaa7-458d-b7fa-e1084b6d7e4b\") " pod="openstack/cinder-backup-0" Dec 13 04:02:18 crc kubenswrapper[5070]: I1213 04:02:18.698261 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/21e7cf18-aaa7-458d-b7fa-e1084b6d7e4b-run\") pod \"cinder-backup-0\" (UID: \"21e7cf18-aaa7-458d-b7fa-e1084b6d7e4b\") " pod="openstack/cinder-backup-0" Dec 13 04:02:18 crc kubenswrapper[5070]: I1213 04:02:18.698298 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zrmnh\" (UniqueName: \"kubernetes.io/projected/21e7cf18-aaa7-458d-b7fa-e1084b6d7e4b-kube-api-access-zrmnh\") pod \"cinder-backup-0\" (UID: \"21e7cf18-aaa7-458d-b7fa-e1084b6d7e4b\") " pod="openstack/cinder-backup-0" Dec 13 04:02:18 crc kubenswrapper[5070]: I1213 04:02:18.698320 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8fd14d72-1dc4-4ab9-8b92-27e740e7eada-config-data-custom\") pod \"cinder-volume-volume1-0\" (UID: \"8fd14d72-1dc4-4ab9-8b92-27e740e7eada\") " pod="openstack/cinder-volume-volume1-0" Dec 13 04:02:18 crc kubenswrapper[5070]: I1213 04:02:18.698364 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/8fd14d72-1dc4-4ab9-8b92-27e740e7eada-dev\") pod \"cinder-volume-volume1-0\" (UID: \"8fd14d72-1dc4-4ab9-8b92-27e740e7eada\") " pod="openstack/cinder-volume-volume1-0" Dec 13 04:02:18 crc kubenswrapper[5070]: I1213 04:02:18.698403 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/21e7cf18-aaa7-458d-b7fa-e1084b6d7e4b-etc-machine-id\") pod \"cinder-backup-0\" (UID: 
\"21e7cf18-aaa7-458d-b7fa-e1084b6d7e4b\") " pod="openstack/cinder-backup-0" Dec 13 04:02:18 crc kubenswrapper[5070]: I1213 04:02:18.698472 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8fd14d72-1dc4-4ab9-8b92-27e740e7eada-combined-ca-bundle\") pod \"cinder-volume-volume1-0\" (UID: \"8fd14d72-1dc4-4ab9-8b92-27e740e7eada\") " pod="openstack/cinder-volume-volume1-0" Dec 13 04:02:18 crc kubenswrapper[5070]: I1213 04:02:18.698498 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/8fd14d72-1dc4-4ab9-8b92-27e740e7eada-ceph\") pod \"cinder-volume-volume1-0\" (UID: \"8fd14d72-1dc4-4ab9-8b92-27e740e7eada\") " pod="openstack/cinder-volume-volume1-0" Dec 13 04:02:18 crc kubenswrapper[5070]: I1213 04:02:18.698536 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/8fd14d72-1dc4-4ab9-8b92-27e740e7eada-lib-modules\") pod \"cinder-volume-volume1-0\" (UID: \"8fd14d72-1dc4-4ab9-8b92-27e740e7eada\") " pod="openstack/cinder-volume-volume1-0" Dec 13 04:02:18 crc kubenswrapper[5070]: I1213 04:02:18.698573 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/21e7cf18-aaa7-458d-b7fa-e1084b6d7e4b-config-data\") pod \"cinder-backup-0\" (UID: \"21e7cf18-aaa7-458d-b7fa-e1084b6d7e4b\") " pod="openstack/cinder-backup-0" Dec 13 04:02:18 crc kubenswrapper[5070]: I1213 04:02:18.698602 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/8fd14d72-1dc4-4ab9-8b92-27e740e7eada-run\") pod \"cinder-volume-volume1-0\" (UID: \"8fd14d72-1dc4-4ab9-8b92-27e740e7eada\") " pod="openstack/cinder-volume-volume1-0" Dec 13 04:02:18 crc kubenswrapper[5070]: I1213 04:02:18.698622 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/8fd14d72-1dc4-4ab9-8b92-27e740e7eada-var-locks-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"8fd14d72-1dc4-4ab9-8b92-27e740e7eada\") " pod="openstack/cinder-volume-volume1-0" Dec 13 04:02:18 crc kubenswrapper[5070]: I1213 04:02:18.698647 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/21e7cf18-aaa7-458d-b7fa-e1084b6d7e4b-dev\") pod \"cinder-backup-0\" (UID: \"21e7cf18-aaa7-458d-b7fa-e1084b6d7e4b\") " pod="openstack/cinder-backup-0" Dec 13 04:02:18 crc kubenswrapper[5070]: I1213 04:02:18.698683 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/21e7cf18-aaa7-458d-b7fa-e1084b6d7e4b-var-lib-cinder\") pod \"cinder-backup-0\" (UID: \"21e7cf18-aaa7-458d-b7fa-e1084b6d7e4b\") " pod="openstack/cinder-backup-0" Dec 13 04:02:18 crc kubenswrapper[5070]: I1213 04:02:18.698711 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/8fd14d72-1dc4-4ab9-8b92-27e740e7eada-etc-iscsi\") pod \"cinder-volume-volume1-0\" (UID: \"8fd14d72-1dc4-4ab9-8b92-27e740e7eada\") " pod="openstack/cinder-volume-volume1-0" Dec 13 04:02:18 crc kubenswrapper[5070]: I1213 04:02:18.698750 5070 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21e7cf18-aaa7-458d-b7fa-e1084b6d7e4b-combined-ca-bundle\") pod \"cinder-backup-0\" (UID: \"21e7cf18-aaa7-458d-b7fa-e1084b6d7e4b\") " pod="openstack/cinder-backup-0" Dec 13 04:02:18 crc kubenswrapper[5070]: I1213 04:02:18.698771 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/21e7cf18-aaa7-458d-b7fa-e1084b6d7e4b-config-data-custom\") pod \"cinder-backup-0\" (UID: \"21e7cf18-aaa7-458d-b7fa-e1084b6d7e4b\") " pod="openstack/cinder-backup-0" Dec 13 04:02:18 crc kubenswrapper[5070]: I1213 04:02:18.698800 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/8fd14d72-1dc4-4ab9-8b92-27e740e7eada-var-lib-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"8fd14d72-1dc4-4ab9-8b92-27e740e7eada\") " pod="openstack/cinder-volume-volume1-0" Dec 13 04:02:18 crc kubenswrapper[5070]: I1213 04:02:18.699094 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/8fd14d72-1dc4-4ab9-8b92-27e740e7eada-var-lib-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"8fd14d72-1dc4-4ab9-8b92-27e740e7eada\") " pod="openstack/cinder-volume-volume1-0" Dec 13 04:02:18 crc kubenswrapper[5070]: I1213 04:02:18.699589 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/8fd14d72-1dc4-4ab9-8b92-27e740e7eada-lib-modules\") pod \"cinder-volume-volume1-0\" (UID: \"8fd14d72-1dc4-4ab9-8b92-27e740e7eada\") " pod="openstack/cinder-volume-volume1-0" Dec 13 04:02:18 crc kubenswrapper[5070]: I1213 04:02:18.699686 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/8fd14d72-1dc4-4ab9-8b92-27e740e7eada-etc-nvme\") pod \"cinder-volume-volume1-0\" (UID: \"8fd14d72-1dc4-4ab9-8b92-27e740e7eada\") " pod="openstack/cinder-volume-volume1-0" Dec 13 04:02:18 crc kubenswrapper[5070]: I1213 04:02:18.699759 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/8fd14d72-1dc4-4ab9-8b92-27e740e7eada-etc-machine-id\") pod \"cinder-volume-volume1-0\" (UID: \"8fd14d72-1dc4-4ab9-8b92-27e740e7eada\") " pod="openstack/cinder-volume-volume1-0" Dec 13 04:02:18 crc kubenswrapper[5070]: I1213 04:02:18.699819 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/8fd14d72-1dc4-4ab9-8b92-27e740e7eada-dev\") pod \"cinder-volume-volume1-0\" (UID: \"8fd14d72-1dc4-4ab9-8b92-27e740e7eada\") " pod="openstack/cinder-volume-volume1-0" Dec 13 04:02:18 crc kubenswrapper[5070]: I1213 04:02:18.700399 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run\" (UniqueName: \"kubernetes.io/host-path/8fd14d72-1dc4-4ab9-8b92-27e740e7eada-run\") pod \"cinder-volume-volume1-0\" (UID: \"8fd14d72-1dc4-4ab9-8b92-27e740e7eada\") " pod="openstack/cinder-volume-volume1-0" Dec 13 04:02:18 crc kubenswrapper[5070]: I1213 04:02:18.700529 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/8fd14d72-1dc4-4ab9-8b92-27e740e7eada-sys\") pod \"cinder-volume-volume1-0\" (UID: \"8fd14d72-1dc4-4ab9-8b92-27e740e7eada\") " 
pod="openstack/cinder-volume-volume1-0" Dec 13 04:02:18 crc kubenswrapper[5070]: I1213 04:02:18.700668 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/8fd14d72-1dc4-4ab9-8b92-27e740e7eada-var-locks-brick\") pod \"cinder-volume-volume1-0\" (UID: \"8fd14d72-1dc4-4ab9-8b92-27e740e7eada\") " pod="openstack/cinder-volume-volume1-0" Dec 13 04:02:18 crc kubenswrapper[5070]: I1213 04:02:18.700693 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/8fd14d72-1dc4-4ab9-8b92-27e740e7eada-var-locks-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"8fd14d72-1dc4-4ab9-8b92-27e740e7eada\") " pod="openstack/cinder-volume-volume1-0" Dec 13 04:02:18 crc kubenswrapper[5070]: I1213 04:02:18.700760 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/8fd14d72-1dc4-4ab9-8b92-27e740e7eada-etc-iscsi\") pod \"cinder-volume-volume1-0\" (UID: \"8fd14d72-1dc4-4ab9-8b92-27e740e7eada\") " pod="openstack/cinder-volume-volume1-0" Dec 13 04:02:18 crc kubenswrapper[5070]: I1213 04:02:18.708489 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8fd14d72-1dc4-4ab9-8b92-27e740e7eada-config-data-custom\") pod \"cinder-volume-volume1-0\" (UID: \"8fd14d72-1dc4-4ab9-8b92-27e740e7eada\") " pod="openstack/cinder-volume-volume1-0" Dec 13 04:02:18 crc kubenswrapper[5070]: I1213 04:02:18.709153 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/8fd14d72-1dc4-4ab9-8b92-27e740e7eada-ceph\") pod \"cinder-volume-volume1-0\" (UID: \"8fd14d72-1dc4-4ab9-8b92-27e740e7eada\") " pod="openstack/cinder-volume-volume1-0" Dec 13 04:02:18 crc kubenswrapper[5070]: I1213 04:02:18.712233 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8fd14d72-1dc4-4ab9-8b92-27e740e7eada-config-data\") pod \"cinder-volume-volume1-0\" (UID: \"8fd14d72-1dc4-4ab9-8b92-27e740e7eada\") " pod="openstack/cinder-volume-volume1-0" Dec 13 04:02:18 crc kubenswrapper[5070]: I1213 04:02:18.714157 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8fd14d72-1dc4-4ab9-8b92-27e740e7eada-combined-ca-bundle\") pod \"cinder-volume-volume1-0\" (UID: \"8fd14d72-1dc4-4ab9-8b92-27e740e7eada\") " pod="openstack/cinder-volume-volume1-0" Dec 13 04:02:18 crc kubenswrapper[5070]: I1213 04:02:18.719300 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6klxz\" (UniqueName: \"kubernetes.io/projected/8fd14d72-1dc4-4ab9-8b92-27e740e7eada-kube-api-access-6klxz\") pod \"cinder-volume-volume1-0\" (UID: \"8fd14d72-1dc4-4ab9-8b92-27e740e7eada\") " pod="openstack/cinder-volume-volume1-0" Dec 13 04:02:18 crc kubenswrapper[5070]: I1213 04:02:18.723754 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8fd14d72-1dc4-4ab9-8b92-27e740e7eada-scripts\") pod \"cinder-volume-volume1-0\" (UID: \"8fd14d72-1dc4-4ab9-8b92-27e740e7eada\") " pod="openstack/cinder-volume-volume1-0" Dec 13 04:02:18 crc kubenswrapper[5070]: I1213 04:02:18.800424 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/21e7cf18-aaa7-458d-b7fa-e1084b6d7e4b-config-data\") pod \"cinder-backup-0\" (UID: \"21e7cf18-aaa7-458d-b7fa-e1084b6d7e4b\") " pod="openstack/cinder-backup-0" Dec 13 04:02:18 crc kubenswrapper[5070]: I1213 04:02:18.800489 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/21e7cf18-aaa7-458d-b7fa-e1084b6d7e4b-dev\") pod \"cinder-backup-0\" (UID: \"21e7cf18-aaa7-458d-b7fa-e1084b6d7e4b\") " pod="openstack/cinder-backup-0" Dec 13 04:02:18 crc kubenswrapper[5070]: I1213 04:02:18.800513 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/21e7cf18-aaa7-458d-b7fa-e1084b6d7e4b-var-lib-cinder\") pod \"cinder-backup-0\" (UID: \"21e7cf18-aaa7-458d-b7fa-e1084b6d7e4b\") " pod="openstack/cinder-backup-0" Dec 13 04:02:18 crc kubenswrapper[5070]: I1213 04:02:18.800564 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21e7cf18-aaa7-458d-b7fa-e1084b6d7e4b-combined-ca-bundle\") pod \"cinder-backup-0\" (UID: \"21e7cf18-aaa7-458d-b7fa-e1084b6d7e4b\") " pod="openstack/cinder-backup-0" Dec 13 04:02:18 crc kubenswrapper[5070]: I1213 04:02:18.800645 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/21e7cf18-aaa7-458d-b7fa-e1084b6d7e4b-var-lib-cinder\") pod \"cinder-backup-0\" (UID: \"21e7cf18-aaa7-458d-b7fa-e1084b6d7e4b\") " pod="openstack/cinder-backup-0" Dec 13 04:02:18 crc kubenswrapper[5070]: I1213 04:02:18.800658 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/21e7cf18-aaa7-458d-b7fa-e1084b6d7e4b-dev\") pod \"cinder-backup-0\" (UID: \"21e7cf18-aaa7-458d-b7fa-e1084b6d7e4b\") " pod="openstack/cinder-backup-0" Dec 13 04:02:18 crc kubenswrapper[5070]: I1213 04:02:18.800695 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/21e7cf18-aaa7-458d-b7fa-e1084b6d7e4b-config-data-custom\") pod \"cinder-backup-0\" (UID: \"21e7cf18-aaa7-458d-b7fa-e1084b6d7e4b\") " pod="openstack/cinder-backup-0" Dec 13 04:02:18 crc kubenswrapper[5070]: I1213 04:02:18.800855 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/21e7cf18-aaa7-458d-b7fa-e1084b6d7e4b-sys\") pod \"cinder-backup-0\" (UID: \"21e7cf18-aaa7-458d-b7fa-e1084b6d7e4b\") " pod="openstack/cinder-backup-0" Dec 13 04:02:18 crc kubenswrapper[5070]: I1213 04:02:18.800881 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/21e7cf18-aaa7-458d-b7fa-e1084b6d7e4b-scripts\") pod \"cinder-backup-0\" (UID: \"21e7cf18-aaa7-458d-b7fa-e1084b6d7e4b\") " pod="openstack/cinder-backup-0" Dec 13 04:02:18 crc kubenswrapper[5070]: I1213 04:02:18.800925 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/21e7cf18-aaa7-458d-b7fa-e1084b6d7e4b-etc-iscsi\") pod \"cinder-backup-0\" (UID: \"21e7cf18-aaa7-458d-b7fa-e1084b6d7e4b\") " pod="openstack/cinder-backup-0" Dec 13 04:02:18 crc kubenswrapper[5070]: I1213 04:02:18.800936 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sys\" (UniqueName: 
\"kubernetes.io/host-path/21e7cf18-aaa7-458d-b7fa-e1084b6d7e4b-sys\") pod \"cinder-backup-0\" (UID: \"21e7cf18-aaa7-458d-b7fa-e1084b6d7e4b\") " pod="openstack/cinder-backup-0" Dec 13 04:02:18 crc kubenswrapper[5070]: I1213 04:02:18.800965 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/21e7cf18-aaa7-458d-b7fa-e1084b6d7e4b-etc-iscsi\") pod \"cinder-backup-0\" (UID: \"21e7cf18-aaa7-458d-b7fa-e1084b6d7e4b\") " pod="openstack/cinder-backup-0" Dec 13 04:02:18 crc kubenswrapper[5070]: I1213 04:02:18.801020 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/21e7cf18-aaa7-458d-b7fa-e1084b6d7e4b-var-locks-cinder\") pod \"cinder-backup-0\" (UID: \"21e7cf18-aaa7-458d-b7fa-e1084b6d7e4b\") " pod="openstack/cinder-backup-0" Dec 13 04:02:18 crc kubenswrapper[5070]: I1213 04:02:18.801082 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/21e7cf18-aaa7-458d-b7fa-e1084b6d7e4b-etc-nvme\") pod \"cinder-backup-0\" (UID: \"21e7cf18-aaa7-458d-b7fa-e1084b6d7e4b\") " pod="openstack/cinder-backup-0" Dec 13 04:02:18 crc kubenswrapper[5070]: I1213 04:02:18.801105 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/21e7cf18-aaa7-458d-b7fa-e1084b6d7e4b-lib-modules\") pod \"cinder-backup-0\" (UID: \"21e7cf18-aaa7-458d-b7fa-e1084b6d7e4b\") " pod="openstack/cinder-backup-0" Dec 13 04:02:18 crc kubenswrapper[5070]: I1213 04:02:18.801142 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/21e7cf18-aaa7-458d-b7fa-e1084b6d7e4b-var-locks-brick\") pod \"cinder-backup-0\" (UID: \"21e7cf18-aaa7-458d-b7fa-e1084b6d7e4b\") " pod="openstack/cinder-backup-0" Dec 13 04:02:18 crc kubenswrapper[5070]: I1213 04:02:18.801153 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/21e7cf18-aaa7-458d-b7fa-e1084b6d7e4b-var-locks-cinder\") pod \"cinder-backup-0\" (UID: \"21e7cf18-aaa7-458d-b7fa-e1084b6d7e4b\") " pod="openstack/cinder-backup-0" Dec 13 04:02:18 crc kubenswrapper[5070]: I1213 04:02:18.801165 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/21e7cf18-aaa7-458d-b7fa-e1084b6d7e4b-ceph\") pod \"cinder-backup-0\" (UID: \"21e7cf18-aaa7-458d-b7fa-e1084b6d7e4b\") " pod="openstack/cinder-backup-0" Dec 13 04:02:18 crc kubenswrapper[5070]: I1213 04:02:18.801240 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/21e7cf18-aaa7-458d-b7fa-e1084b6d7e4b-etc-nvme\") pod \"cinder-backup-0\" (UID: \"21e7cf18-aaa7-458d-b7fa-e1084b6d7e4b\") " pod="openstack/cinder-backup-0" Dec 13 04:02:18 crc kubenswrapper[5070]: I1213 04:02:18.801259 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/21e7cf18-aaa7-458d-b7fa-e1084b6d7e4b-run\") pod \"cinder-backup-0\" (UID: \"21e7cf18-aaa7-458d-b7fa-e1084b6d7e4b\") " pod="openstack/cinder-backup-0" Dec 13 04:02:18 crc kubenswrapper[5070]: I1213 04:02:18.801273 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lib-modules\" (UniqueName: 
\"kubernetes.io/host-path/21e7cf18-aaa7-458d-b7fa-e1084b6d7e4b-lib-modules\") pod \"cinder-backup-0\" (UID: \"21e7cf18-aaa7-458d-b7fa-e1084b6d7e4b\") " pod="openstack/cinder-backup-0" Dec 13 04:02:18 crc kubenswrapper[5070]: I1213 04:02:18.801307 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/21e7cf18-aaa7-458d-b7fa-e1084b6d7e4b-var-locks-brick\") pod \"cinder-backup-0\" (UID: \"21e7cf18-aaa7-458d-b7fa-e1084b6d7e4b\") " pod="openstack/cinder-backup-0" Dec 13 04:02:18 crc kubenswrapper[5070]: I1213 04:02:18.801314 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zrmnh\" (UniqueName: \"kubernetes.io/projected/21e7cf18-aaa7-458d-b7fa-e1084b6d7e4b-kube-api-access-zrmnh\") pod \"cinder-backup-0\" (UID: \"21e7cf18-aaa7-458d-b7fa-e1084b6d7e4b\") " pod="openstack/cinder-backup-0" Dec 13 04:02:18 crc kubenswrapper[5070]: I1213 04:02:18.801331 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run\" (UniqueName: \"kubernetes.io/host-path/21e7cf18-aaa7-458d-b7fa-e1084b6d7e4b-run\") pod \"cinder-backup-0\" (UID: \"21e7cf18-aaa7-458d-b7fa-e1084b6d7e4b\") " pod="openstack/cinder-backup-0" Dec 13 04:02:18 crc kubenswrapper[5070]: I1213 04:02:18.801392 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/21e7cf18-aaa7-458d-b7fa-e1084b6d7e4b-etc-machine-id\") pod \"cinder-backup-0\" (UID: \"21e7cf18-aaa7-458d-b7fa-e1084b6d7e4b\") " pod="openstack/cinder-backup-0" Dec 13 04:02:18 crc kubenswrapper[5070]: I1213 04:02:18.801577 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/21e7cf18-aaa7-458d-b7fa-e1084b6d7e4b-etc-machine-id\") pod \"cinder-backup-0\" (UID: \"21e7cf18-aaa7-458d-b7fa-e1084b6d7e4b\") " pod="openstack/cinder-backup-0" Dec 13 04:02:18 crc kubenswrapper[5070]: I1213 04:02:18.804483 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/21e7cf18-aaa7-458d-b7fa-e1084b6d7e4b-scripts\") pod \"cinder-backup-0\" (UID: \"21e7cf18-aaa7-458d-b7fa-e1084b6d7e4b\") " pod="openstack/cinder-backup-0" Dec 13 04:02:18 crc kubenswrapper[5070]: I1213 04:02:18.805549 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/21e7cf18-aaa7-458d-b7fa-e1084b6d7e4b-ceph\") pod \"cinder-backup-0\" (UID: \"21e7cf18-aaa7-458d-b7fa-e1084b6d7e4b\") " pod="openstack/cinder-backup-0" Dec 13 04:02:18 crc kubenswrapper[5070]: I1213 04:02:18.808831 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/21e7cf18-aaa7-458d-b7fa-e1084b6d7e4b-config-data\") pod \"cinder-backup-0\" (UID: \"21e7cf18-aaa7-458d-b7fa-e1084b6d7e4b\") " pod="openstack/cinder-backup-0" Dec 13 04:02:18 crc kubenswrapper[5070]: I1213 04:02:18.809249 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21e7cf18-aaa7-458d-b7fa-e1084b6d7e4b-combined-ca-bundle\") pod \"cinder-backup-0\" (UID: \"21e7cf18-aaa7-458d-b7fa-e1084b6d7e4b\") " pod="openstack/cinder-backup-0" Dec 13 04:02:18 crc kubenswrapper[5070]: I1213 04:02:18.810297 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: 
\"kubernetes.io/secret/21e7cf18-aaa7-458d-b7fa-e1084b6d7e4b-config-data-custom\") pod \"cinder-backup-0\" (UID: \"21e7cf18-aaa7-458d-b7fa-e1084b6d7e4b\") " pod="openstack/cinder-backup-0" Dec 13 04:02:18 crc kubenswrapper[5070]: I1213 04:02:18.818191 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zrmnh\" (UniqueName: \"kubernetes.io/projected/21e7cf18-aaa7-458d-b7fa-e1084b6d7e4b-kube-api-access-zrmnh\") pod \"cinder-backup-0\" (UID: \"21e7cf18-aaa7-458d-b7fa-e1084b6d7e4b\") " pod="openstack/cinder-backup-0" Dec 13 04:02:18 crc kubenswrapper[5070]: I1213 04:02:18.879986 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-volume-volume1-0" Dec 13 04:02:18 crc kubenswrapper[5070]: I1213 04:02:18.915591 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-backup-0" Dec 13 04:02:19 crc kubenswrapper[5070]: I1213 04:02:19.002152 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-db-create-7ncs9"] Dec 13 04:02:19 crc kubenswrapper[5070]: I1213 04:02:19.004043 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-create-7ncs9" Dec 13 04:02:19 crc kubenswrapper[5070]: I1213 04:02:19.019217 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-db-create-7ncs9"] Dec 13 04:02:19 crc kubenswrapper[5070]: I1213 04:02:19.106905 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m72ml\" (UniqueName: \"kubernetes.io/projected/b1e14e43-fceb-409e-8d3b-9e0435a19506-kube-api-access-m72ml\") pod \"manila-db-create-7ncs9\" (UID: \"b1e14e43-fceb-409e-8d3b-9e0435a19506\") " pod="openstack/manila-db-create-7ncs9" Dec 13 04:02:19 crc kubenswrapper[5070]: I1213 04:02:19.107039 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-64b5b8c565-xr2cz"] Dec 13 04:02:19 crc kubenswrapper[5070]: I1213 04:02:19.108537 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-64b5b8c565-xr2cz" Dec 13 04:02:19 crc kubenswrapper[5070]: I1213 04:02:19.117507 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-scripts" Dec 13 04:02:19 crc kubenswrapper[5070]: I1213 04:02:19.117560 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-config-data" Dec 13 04:02:19 crc kubenswrapper[5070]: I1213 04:02:19.117800 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon" Dec 13 04:02:19 crc kubenswrapper[5070]: I1213 04:02:19.117900 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon-horizon-dockercfg-fwkbm" Dec 13 04:02:19 crc kubenswrapper[5070]: I1213 04:02:19.135299 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-64b5b8c565-xr2cz"] Dec 13 04:02:19 crc kubenswrapper[5070]: I1213 04:02:19.208020 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pb68n\" (UniqueName: \"kubernetes.io/projected/b6951934-d07e-4e5a-930c-779502792de9-kube-api-access-pb68n\") pod \"horizon-64b5b8c565-xr2cz\" (UID: \"b6951934-d07e-4e5a-930c-779502792de9\") " pod="openstack/horizon-64b5b8c565-xr2cz" Dec 13 04:02:19 crc kubenswrapper[5070]: I1213 04:02:19.208291 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b6951934-d07e-4e5a-930c-779502792de9-scripts\") pod \"horizon-64b5b8c565-xr2cz\" (UID: \"b6951934-d07e-4e5a-930c-779502792de9\") " pod="openstack/horizon-64b5b8c565-xr2cz" Dec 13 04:02:19 crc kubenswrapper[5070]: I1213 04:02:19.208316 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b6951934-d07e-4e5a-930c-779502792de9-logs\") pod \"horizon-64b5b8c565-xr2cz\" (UID: \"b6951934-d07e-4e5a-930c-779502792de9\") " pod="openstack/horizon-64b5b8c565-xr2cz" Dec 13 04:02:19 crc kubenswrapper[5070]: I1213 04:02:19.208377 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m72ml\" (UniqueName: \"kubernetes.io/projected/b1e14e43-fceb-409e-8d3b-9e0435a19506-kube-api-access-m72ml\") pod \"manila-db-create-7ncs9\" (UID: \"b1e14e43-fceb-409e-8d3b-9e0435a19506\") " pod="openstack/manila-db-create-7ncs9" Dec 13 04:02:19 crc kubenswrapper[5070]: I1213 04:02:19.208490 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b6951934-d07e-4e5a-930c-779502792de9-config-data\") pod \"horizon-64b5b8c565-xr2cz\" (UID: \"b6951934-d07e-4e5a-930c-779502792de9\") " pod="openstack/horizon-64b5b8c565-xr2cz" Dec 13 04:02:19 crc kubenswrapper[5070]: I1213 04:02:19.208533 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/b6951934-d07e-4e5a-930c-779502792de9-horizon-secret-key\") pod \"horizon-64b5b8c565-xr2cz\" (UID: \"b6951934-d07e-4e5a-930c-779502792de9\") " pod="openstack/horizon-64b5b8c565-xr2cz" Dec 13 04:02:19 crc kubenswrapper[5070]: I1213 04:02:19.240511 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-784865f99f-5r8dj"] Dec 13 04:02:19 crc kubenswrapper[5070]: I1213 04:02:19.243220 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-784865f99f-5r8dj" Dec 13 04:02:19 crc kubenswrapper[5070]: I1213 04:02:19.263687 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Dec 13 04:02:19 crc kubenswrapper[5070]: I1213 04:02:19.265343 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 13 04:02:19 crc kubenswrapper[5070]: I1213 04:02:19.267077 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Dec 13 04:02:19 crc kubenswrapper[5070]: I1213 04:02:19.267297 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Dec 13 04:02:19 crc kubenswrapper[5070]: I1213 04:02:19.268061 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Dec 13 04:02:19 crc kubenswrapper[5070]: I1213 04:02:19.273408 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-7z77k" Dec 13 04:02:19 crc kubenswrapper[5070]: I1213 04:02:19.294240 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-784865f99f-5r8dj"] Dec 13 04:02:19 crc kubenswrapper[5070]: I1213 04:02:19.295093 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m72ml\" (UniqueName: \"kubernetes.io/projected/b1e14e43-fceb-409e-8d3b-9e0435a19506-kube-api-access-m72ml\") pod \"manila-db-create-7ncs9\" (UID: \"b1e14e43-fceb-409e-8d3b-9e0435a19506\") " pod="openstack/manila-db-create-7ncs9" Dec 13 04:02:19 crc kubenswrapper[5070]: I1213 04:02:19.312252 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b6951934-d07e-4e5a-930c-779502792de9-config-data\") pod \"horizon-64b5b8c565-xr2cz\" (UID: \"b6951934-d07e-4e5a-930c-779502792de9\") " pod="openstack/horizon-64b5b8c565-xr2cz" Dec 13 04:02:19 crc kubenswrapper[5070]: I1213 04:02:19.312333 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/b6951934-d07e-4e5a-930c-779502792de9-horizon-secret-key\") pod \"horizon-64b5b8c565-xr2cz\" (UID: \"b6951934-d07e-4e5a-930c-779502792de9\") " pod="openstack/horizon-64b5b8c565-xr2cz" Dec 13 04:02:19 crc kubenswrapper[5070]: I1213 04:02:19.312402 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pb68n\" (UniqueName: \"kubernetes.io/projected/b6951934-d07e-4e5a-930c-779502792de9-kube-api-access-pb68n\") pod \"horizon-64b5b8c565-xr2cz\" (UID: \"b6951934-d07e-4e5a-930c-779502792de9\") " pod="openstack/horizon-64b5b8c565-xr2cz" Dec 13 04:02:19 crc kubenswrapper[5070]: I1213 04:02:19.312430 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b6951934-d07e-4e5a-930c-779502792de9-scripts\") pod \"horizon-64b5b8c565-xr2cz\" (UID: \"b6951934-d07e-4e5a-930c-779502792de9\") " pod="openstack/horizon-64b5b8c565-xr2cz" Dec 13 04:02:19 crc kubenswrapper[5070]: I1213 04:02:19.312545 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b6951934-d07e-4e5a-930c-779502792de9-logs\") pod \"horizon-64b5b8c565-xr2cz\" (UID: \"b6951934-d07e-4e5a-930c-779502792de9\") " pod="openstack/horizon-64b5b8c565-xr2cz" Dec 13 04:02:19 crc 
kubenswrapper[5070]: I1213 04:02:19.315135 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 13 04:02:19 crc kubenswrapper[5070]: I1213 04:02:19.332931 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b6951934-d07e-4e5a-930c-779502792de9-logs\") pod \"horizon-64b5b8c565-xr2cz\" (UID: \"b6951934-d07e-4e5a-930c-779502792de9\") " pod="openstack/horizon-64b5b8c565-xr2cz" Dec 13 04:02:19 crc kubenswrapper[5070]: I1213 04:02:19.336310 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b6951934-d07e-4e5a-930c-779502792de9-scripts\") pod \"horizon-64b5b8c565-xr2cz\" (UID: \"b6951934-d07e-4e5a-930c-779502792de9\") " pod="openstack/horizon-64b5b8c565-xr2cz" Dec 13 04:02:19 crc kubenswrapper[5070]: I1213 04:02:19.336349 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b6951934-d07e-4e5a-930c-779502792de9-config-data\") pod \"horizon-64b5b8c565-xr2cz\" (UID: \"b6951934-d07e-4e5a-930c-779502792de9\") " pod="openstack/horizon-64b5b8c565-xr2cz" Dec 13 04:02:19 crc kubenswrapper[5070]: I1213 04:02:19.339748 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 13 04:02:19 crc kubenswrapper[5070]: I1213 04:02:19.344782 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 13 04:02:19 crc kubenswrapper[5070]: I1213 04:02:19.348425 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/b6951934-d07e-4e5a-930c-779502792de9-horizon-secret-key\") pod \"horizon-64b5b8c565-xr2cz\" (UID: \"b6951934-d07e-4e5a-930c-779502792de9\") " pod="openstack/horizon-64b5b8c565-xr2cz" Dec 13 04:02:19 crc kubenswrapper[5070]: I1213 04:02:19.348677 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Dec 13 04:02:19 crc kubenswrapper[5070]: I1213 04:02:19.348817 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Dec 13 04:02:19 crc kubenswrapper[5070]: I1213 04:02:19.351904 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pb68n\" (UniqueName: \"kubernetes.io/projected/b6951934-d07e-4e5a-930c-779502792de9-kube-api-access-pb68n\") pod \"horizon-64b5b8c565-xr2cz\" (UID: \"b6951934-d07e-4e5a-930c-779502792de9\") " pod="openstack/horizon-64b5b8c565-xr2cz" Dec 13 04:02:19 crc kubenswrapper[5070]: I1213 04:02:19.360170 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-db-create-7ncs9" Dec 13 04:02:19 crc kubenswrapper[5070]: I1213 04:02:19.361419 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 13 04:02:19 crc kubenswrapper[5070]: I1213 04:02:19.415740 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/89ec3336-0c2e-4377-9cb8-fd3ded3af7d0-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"89ec3336-0c2e-4377-9cb8-fd3ded3af7d0\") " pod="openstack/glance-default-external-api-0" Dec 13 04:02:19 crc kubenswrapper[5070]: I1213 04:02:19.415801 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/89ec3336-0c2e-4377-9cb8-fd3ded3af7d0-logs\") pod \"glance-default-external-api-0\" (UID: \"89ec3336-0c2e-4377-9cb8-fd3ded3af7d0\") " pod="openstack/glance-default-external-api-0" Dec 13 04:02:19 crc kubenswrapper[5070]: I1213 04:02:19.415828 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-24mcs\" (UniqueName: \"kubernetes.io/projected/904c7826-4376-4e6d-a520-a12e30a3495d-kube-api-access-24mcs\") pod \"horizon-784865f99f-5r8dj\" (UID: \"904c7826-4376-4e6d-a520-a12e30a3495d\") " pod="openstack/horizon-784865f99f-5r8dj" Dec 13 04:02:19 crc kubenswrapper[5070]: I1213 04:02:19.415864 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/904c7826-4376-4e6d-a520-a12e30a3495d-scripts\") pod \"horizon-784865f99f-5r8dj\" (UID: \"904c7826-4376-4e6d-a520-a12e30a3495d\") " pod="openstack/horizon-784865f99f-5r8dj" Dec 13 04:02:19 crc kubenswrapper[5070]: I1213 04:02:19.415883 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/89ec3336-0c2e-4377-9cb8-fd3ded3af7d0-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"89ec3336-0c2e-4377-9cb8-fd3ded3af7d0\") " pod="openstack/glance-default-external-api-0" Dec 13 04:02:19 crc kubenswrapper[5070]: I1213 04:02:19.415909 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/89ec3336-0c2e-4377-9cb8-fd3ded3af7d0-ceph\") pod \"glance-default-external-api-0\" (UID: \"89ec3336-0c2e-4377-9cb8-fd3ded3af7d0\") " pod="openstack/glance-default-external-api-0" Dec 13 04:02:19 crc kubenswrapper[5070]: I1213 04:02:19.415926 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/904c7826-4376-4e6d-a520-a12e30a3495d-horizon-secret-key\") pod \"horizon-784865f99f-5r8dj\" (UID: \"904c7826-4376-4e6d-a520-a12e30a3495d\") " pod="openstack/horizon-784865f99f-5r8dj" Dec 13 04:02:19 crc kubenswrapper[5070]: I1213 04:02:19.415948 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/89ec3336-0c2e-4377-9cb8-fd3ded3af7d0-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"89ec3336-0c2e-4377-9cb8-fd3ded3af7d0\") " pod="openstack/glance-default-external-api-0" Dec 13 04:02:19 crc kubenswrapper[5070]: I1213 04:02:19.415969 5070 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"89ec3336-0c2e-4377-9cb8-fd3ded3af7d0\") " pod="openstack/glance-default-external-api-0" Dec 13 04:02:19 crc kubenswrapper[5070]: I1213 04:02:19.415986 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/904c7826-4376-4e6d-a520-a12e30a3495d-logs\") pod \"horizon-784865f99f-5r8dj\" (UID: \"904c7826-4376-4e6d-a520-a12e30a3495d\") " pod="openstack/horizon-784865f99f-5r8dj" Dec 13 04:02:19 crc kubenswrapper[5070]: I1213 04:02:19.416010 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/89ec3336-0c2e-4377-9cb8-fd3ded3af7d0-scripts\") pod \"glance-default-external-api-0\" (UID: \"89ec3336-0c2e-4377-9cb8-fd3ded3af7d0\") " pod="openstack/glance-default-external-api-0" Dec 13 04:02:19 crc kubenswrapper[5070]: I1213 04:02:19.416027 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/904c7826-4376-4e6d-a520-a12e30a3495d-config-data\") pod \"horizon-784865f99f-5r8dj\" (UID: \"904c7826-4376-4e6d-a520-a12e30a3495d\") " pod="openstack/horizon-784865f99f-5r8dj" Dec 13 04:02:19 crc kubenswrapper[5070]: I1213 04:02:19.416042 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sg5bd\" (UniqueName: \"kubernetes.io/projected/89ec3336-0c2e-4377-9cb8-fd3ded3af7d0-kube-api-access-sg5bd\") pod \"glance-default-external-api-0\" (UID: \"89ec3336-0c2e-4377-9cb8-fd3ded3af7d0\") " pod="openstack/glance-default-external-api-0" Dec 13 04:02:19 crc kubenswrapper[5070]: I1213 04:02:19.416080 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/89ec3336-0c2e-4377-9cb8-fd3ded3af7d0-config-data\") pod \"glance-default-external-api-0\" (UID: \"89ec3336-0c2e-4377-9cb8-fd3ded3af7d0\") " pod="openstack/glance-default-external-api-0" Dec 13 04:02:19 crc kubenswrapper[5070]: I1213 04:02:19.451204 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-64b5b8c565-xr2cz" Dec 13 04:02:19 crc kubenswrapper[5070]: I1213 04:02:19.518490 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mngdv\" (UniqueName: \"kubernetes.io/projected/3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d-kube-api-access-mngdv\") pod \"glance-default-internal-api-0\" (UID: \"3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d\") " pod="openstack/glance-default-internal-api-0" Dec 13 04:02:19 crc kubenswrapper[5070]: I1213 04:02:19.518557 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/89ec3336-0c2e-4377-9cb8-fd3ded3af7d0-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"89ec3336-0c2e-4377-9cb8-fd3ded3af7d0\") " pod="openstack/glance-default-external-api-0" Dec 13 04:02:19 crc kubenswrapper[5070]: I1213 04:02:19.518593 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d\") " pod="openstack/glance-default-internal-api-0" Dec 13 04:02:19 crc kubenswrapper[5070]: I1213 04:02:19.518635 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/89ec3336-0c2e-4377-9cb8-fd3ded3af7d0-ceph\") pod \"glance-default-external-api-0\" (UID: \"89ec3336-0c2e-4377-9cb8-fd3ded3af7d0\") " pod="openstack/glance-default-external-api-0" Dec 13 04:02:19 crc kubenswrapper[5070]: I1213 04:02:19.518958 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/904c7826-4376-4e6d-a520-a12e30a3495d-horizon-secret-key\") pod \"horizon-784865f99f-5r8dj\" (UID: \"904c7826-4376-4e6d-a520-a12e30a3495d\") " pod="openstack/horizon-784865f99f-5r8dj" Dec 13 04:02:19 crc kubenswrapper[5070]: I1213 04:02:19.519008 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/89ec3336-0c2e-4377-9cb8-fd3ded3af7d0-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"89ec3336-0c2e-4377-9cb8-fd3ded3af7d0\") " pod="openstack/glance-default-external-api-0" Dec 13 04:02:19 crc kubenswrapper[5070]: I1213 04:02:19.519036 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"89ec3336-0c2e-4377-9cb8-fd3ded3af7d0\") " pod="openstack/glance-default-external-api-0" Dec 13 04:02:19 crc kubenswrapper[5070]: I1213 04:02:19.519065 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/904c7826-4376-4e6d-a520-a12e30a3495d-logs\") pod \"horizon-784865f99f-5r8dj\" (UID: \"904c7826-4376-4e6d-a520-a12e30a3495d\") " pod="openstack/horizon-784865f99f-5r8dj" Dec 13 04:02:19 crc kubenswrapper[5070]: I1213 04:02:19.519103 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/89ec3336-0c2e-4377-9cb8-fd3ded3af7d0-scripts\") pod \"glance-default-external-api-0\" (UID: \"89ec3336-0c2e-4377-9cb8-fd3ded3af7d0\") " pod="openstack/glance-default-external-api-0" 
Dec 13 04:02:19 crc kubenswrapper[5070]: I1213 04:02:19.519128 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/904c7826-4376-4e6d-a520-a12e30a3495d-config-data\") pod \"horizon-784865f99f-5r8dj\" (UID: \"904c7826-4376-4e6d-a520-a12e30a3495d\") " pod="openstack/horizon-784865f99f-5r8dj" Dec 13 04:02:19 crc kubenswrapper[5070]: I1213 04:02:19.519157 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sg5bd\" (UniqueName: \"kubernetes.io/projected/89ec3336-0c2e-4377-9cb8-fd3ded3af7d0-kube-api-access-sg5bd\") pod \"glance-default-external-api-0\" (UID: \"89ec3336-0c2e-4377-9cb8-fd3ded3af7d0\") " pod="openstack/glance-default-external-api-0" Dec 13 04:02:19 crc kubenswrapper[5070]: I1213 04:02:19.519186 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d\") " pod="openstack/glance-default-internal-api-0" Dec 13 04:02:19 crc kubenswrapper[5070]: I1213 04:02:19.519212 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/89ec3336-0c2e-4377-9cb8-fd3ded3af7d0-config-data\") pod \"glance-default-external-api-0\" (UID: \"89ec3336-0c2e-4377-9cb8-fd3ded3af7d0\") " pod="openstack/glance-default-external-api-0" Dec 13 04:02:19 crc kubenswrapper[5070]: I1213 04:02:19.519246 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d\") " pod="openstack/glance-default-internal-api-0" Dec 13 04:02:19 crc kubenswrapper[5070]: I1213 04:02:19.519317 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d-scripts\") pod \"glance-default-internal-api-0\" (UID: \"3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d\") " pod="openstack/glance-default-internal-api-0" Dec 13 04:02:19 crc kubenswrapper[5070]: I1213 04:02:19.519356 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d-logs\") pod \"glance-default-internal-api-0\" (UID: \"3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d\") " pod="openstack/glance-default-internal-api-0" Dec 13 04:02:19 crc kubenswrapper[5070]: I1213 04:02:19.519500 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/89ec3336-0c2e-4377-9cb8-fd3ded3af7d0-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"89ec3336-0c2e-4377-9cb8-fd3ded3af7d0\") " pod="openstack/glance-default-external-api-0" Dec 13 04:02:19 crc kubenswrapper[5070]: I1213 04:02:19.519583 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-internal-api-0\" (UID: \"3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d\") " pod="openstack/glance-default-internal-api-0" Dec 13 04:02:19 crc 
kubenswrapper[5070]: I1213 04:02:19.519633 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/89ec3336-0c2e-4377-9cb8-fd3ded3af7d0-logs\") pod \"glance-default-external-api-0\" (UID: \"89ec3336-0c2e-4377-9cb8-fd3ded3af7d0\") " pod="openstack/glance-default-external-api-0" Dec 13 04:02:19 crc kubenswrapper[5070]: I1213 04:02:19.519670 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-24mcs\" (UniqueName: \"kubernetes.io/projected/904c7826-4376-4e6d-a520-a12e30a3495d-kube-api-access-24mcs\") pod \"horizon-784865f99f-5r8dj\" (UID: \"904c7826-4376-4e6d-a520-a12e30a3495d\") " pod="openstack/horizon-784865f99f-5r8dj" Dec 13 04:02:19 crc kubenswrapper[5070]: I1213 04:02:19.519704 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d-ceph\") pod \"glance-default-internal-api-0\" (UID: \"3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d\") " pod="openstack/glance-default-internal-api-0" Dec 13 04:02:19 crc kubenswrapper[5070]: I1213 04:02:19.519746 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d-config-data\") pod \"glance-default-internal-api-0\" (UID: \"3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d\") " pod="openstack/glance-default-internal-api-0" Dec 13 04:02:19 crc kubenswrapper[5070]: I1213 04:02:19.519775 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/904c7826-4376-4e6d-a520-a12e30a3495d-scripts\") pod \"horizon-784865f99f-5r8dj\" (UID: \"904c7826-4376-4e6d-a520-a12e30a3495d\") " pod="openstack/horizon-784865f99f-5r8dj" Dec 13 04:02:19 crc kubenswrapper[5070]: I1213 04:02:19.520730 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/904c7826-4376-4e6d-a520-a12e30a3495d-scripts\") pod \"horizon-784865f99f-5r8dj\" (UID: \"904c7826-4376-4e6d-a520-a12e30a3495d\") " pod="openstack/horizon-784865f99f-5r8dj" Dec 13 04:02:19 crc kubenswrapper[5070]: I1213 04:02:19.525944 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/89ec3336-0c2e-4377-9cb8-fd3ded3af7d0-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"89ec3336-0c2e-4377-9cb8-fd3ded3af7d0\") " pod="openstack/glance-default-external-api-0" Dec 13 04:02:19 crc kubenswrapper[5070]: I1213 04:02:19.526978 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/89ec3336-0c2e-4377-9cb8-fd3ded3af7d0-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"89ec3336-0c2e-4377-9cb8-fd3ded3af7d0\") " pod="openstack/glance-default-external-api-0" Dec 13 04:02:19 crc kubenswrapper[5070]: I1213 04:02:19.527516 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/89ec3336-0c2e-4377-9cb8-fd3ded3af7d0-logs\") pod \"glance-default-external-api-0\" (UID: \"89ec3336-0c2e-4377-9cb8-fd3ded3af7d0\") " pod="openstack/glance-default-external-api-0" Dec 13 04:02:19 crc kubenswrapper[5070]: I1213 04:02:19.527809 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/904c7826-4376-4e6d-a520-a12e30a3495d-logs\") pod \"horizon-784865f99f-5r8dj\" (UID: \"904c7826-4376-4e6d-a520-a12e30a3495d\") " pod="openstack/horizon-784865f99f-5r8dj" Dec 13 04:02:19 crc kubenswrapper[5070]: I1213 04:02:19.528699 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/89ec3336-0c2e-4377-9cb8-fd3ded3af7d0-ceph\") pod \"glance-default-external-api-0\" (UID: \"89ec3336-0c2e-4377-9cb8-fd3ded3af7d0\") " pod="openstack/glance-default-external-api-0" Dec 13 04:02:19 crc kubenswrapper[5070]: I1213 04:02:19.528784 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/904c7826-4376-4e6d-a520-a12e30a3495d-config-data\") pod \"horizon-784865f99f-5r8dj\" (UID: \"904c7826-4376-4e6d-a520-a12e30a3495d\") " pod="openstack/horizon-784865f99f-5r8dj" Dec 13 04:02:19 crc kubenswrapper[5070]: I1213 04:02:19.529773 5070 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"89ec3336-0c2e-4377-9cb8-fd3ded3af7d0\") device mount path \"/mnt/openstack/pv09\"" pod="openstack/glance-default-external-api-0" Dec 13 04:02:19 crc kubenswrapper[5070]: I1213 04:02:19.531601 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/89ec3336-0c2e-4377-9cb8-fd3ded3af7d0-config-data\") pod \"glance-default-external-api-0\" (UID: \"89ec3336-0c2e-4377-9cb8-fd3ded3af7d0\") " pod="openstack/glance-default-external-api-0" Dec 13 04:02:19 crc kubenswrapper[5070]: I1213 04:02:19.531864 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/89ec3336-0c2e-4377-9cb8-fd3ded3af7d0-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"89ec3336-0c2e-4377-9cb8-fd3ded3af7d0\") " pod="openstack/glance-default-external-api-0" Dec 13 04:02:19 crc kubenswrapper[5070]: I1213 04:02:19.535400 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/904c7826-4376-4e6d-a520-a12e30a3495d-horizon-secret-key\") pod \"horizon-784865f99f-5r8dj\" (UID: \"904c7826-4376-4e6d-a520-a12e30a3495d\") " pod="openstack/horizon-784865f99f-5r8dj" Dec 13 04:02:19 crc kubenswrapper[5070]: I1213 04:02:19.548785 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/89ec3336-0c2e-4377-9cb8-fd3ded3af7d0-scripts\") pod \"glance-default-external-api-0\" (UID: \"89ec3336-0c2e-4377-9cb8-fd3ded3af7d0\") " pod="openstack/glance-default-external-api-0" Dec 13 04:02:19 crc kubenswrapper[5070]: I1213 04:02:19.552048 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-24mcs\" (UniqueName: \"kubernetes.io/projected/904c7826-4376-4e6d-a520-a12e30a3495d-kube-api-access-24mcs\") pod \"horizon-784865f99f-5r8dj\" (UID: \"904c7826-4376-4e6d-a520-a12e30a3495d\") " pod="openstack/horizon-784865f99f-5r8dj" Dec 13 04:02:19 crc kubenswrapper[5070]: I1213 04:02:19.567373 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sg5bd\" (UniqueName: \"kubernetes.io/projected/89ec3336-0c2e-4377-9cb8-fd3ded3af7d0-kube-api-access-sg5bd\") pod \"glance-default-external-api-0\" (UID: 
\"89ec3336-0c2e-4377-9cb8-fd3ded3af7d0\") " pod="openstack/glance-default-external-api-0" Dec 13 04:02:19 crc kubenswrapper[5070]: I1213 04:02:19.607686 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"89ec3336-0c2e-4377-9cb8-fd3ded3af7d0\") " pod="openstack/glance-default-external-api-0" Dec 13 04:02:19 crc kubenswrapper[5070]: I1213 04:02:19.654505 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-internal-api-0\" (UID: \"3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d\") " pod="openstack/glance-default-internal-api-0" Dec 13 04:02:19 crc kubenswrapper[5070]: I1213 04:02:19.654589 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d-ceph\") pod \"glance-default-internal-api-0\" (UID: \"3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d\") " pod="openstack/glance-default-internal-api-0" Dec 13 04:02:19 crc kubenswrapper[5070]: I1213 04:02:19.654619 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d-config-data\") pod \"glance-default-internal-api-0\" (UID: \"3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d\") " pod="openstack/glance-default-internal-api-0" Dec 13 04:02:19 crc kubenswrapper[5070]: I1213 04:02:19.654661 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mngdv\" (UniqueName: \"kubernetes.io/projected/3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d-kube-api-access-mngdv\") pod \"glance-default-internal-api-0\" (UID: \"3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d\") " pod="openstack/glance-default-internal-api-0" Dec 13 04:02:19 crc kubenswrapper[5070]: I1213 04:02:19.654695 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d\") " pod="openstack/glance-default-internal-api-0" Dec 13 04:02:19 crc kubenswrapper[5070]: I1213 04:02:19.654842 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d\") " pod="openstack/glance-default-internal-api-0" Dec 13 04:02:19 crc kubenswrapper[5070]: I1213 04:02:19.654880 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d\") " pod="openstack/glance-default-internal-api-0" Dec 13 04:02:19 crc kubenswrapper[5070]: I1213 04:02:19.654945 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d-scripts\") pod \"glance-default-internal-api-0\" (UID: \"3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d\") " pod="openstack/glance-default-internal-api-0" Dec 13 04:02:19 crc 
kubenswrapper[5070]: I1213 04:02:19.654976 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d-logs\") pod \"glance-default-internal-api-0\" (UID: \"3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d\") " pod="openstack/glance-default-internal-api-0" Dec 13 04:02:19 crc kubenswrapper[5070]: I1213 04:02:19.657124 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d-logs\") pod \"glance-default-internal-api-0\" (UID: \"3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d\") " pod="openstack/glance-default-internal-api-0" Dec 13 04:02:19 crc kubenswrapper[5070]: I1213 04:02:19.657143 5070 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-internal-api-0\" (UID: \"3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/glance-default-internal-api-0" Dec 13 04:02:19 crc kubenswrapper[5070]: I1213 04:02:19.657669 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d\") " pod="openstack/glance-default-internal-api-0" Dec 13 04:02:19 crc kubenswrapper[5070]: I1213 04:02:19.668277 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d-scripts\") pod \"glance-default-internal-api-0\" (UID: \"3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d\") " pod="openstack/glance-default-internal-api-0" Dec 13 04:02:19 crc kubenswrapper[5070]: I1213 04:02:19.669549 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d\") " pod="openstack/glance-default-internal-api-0" Dec 13 04:02:19 crc kubenswrapper[5070]: I1213 04:02:19.670236 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d-config-data\") pod \"glance-default-internal-api-0\" (UID: \"3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d\") " pod="openstack/glance-default-internal-api-0" Dec 13 04:02:19 crc kubenswrapper[5070]: I1213 04:02:19.670618 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d\") " pod="openstack/glance-default-internal-api-0" Dec 13 04:02:19 crc kubenswrapper[5070]: I1213 04:02:19.671302 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d-ceph\") pod \"glance-default-internal-api-0\" (UID: \"3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d\") " pod="openstack/glance-default-internal-api-0" Dec 13 04:02:19 crc kubenswrapper[5070]: I1213 04:02:19.678470 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mngdv\" (UniqueName: 
\"kubernetes.io/projected/3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d-kube-api-access-mngdv\") pod \"glance-default-internal-api-0\" (UID: \"3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d\") " pod="openstack/glance-default-internal-api-0" Dec 13 04:02:19 crc kubenswrapper[5070]: I1213 04:02:19.695072 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-internal-api-0\" (UID: \"3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d\") " pod="openstack/glance-default-internal-api-0" Dec 13 04:02:19 crc kubenswrapper[5070]: I1213 04:02:19.710121 5070 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 13 04:02:19 crc kubenswrapper[5070]: I1213 04:02:19.729827 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-backup-0"] Dec 13 04:02:19 crc kubenswrapper[5070]: I1213 04:02:19.759882 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-784865f99f-5r8dj" Dec 13 04:02:19 crc kubenswrapper[5070]: I1213 04:02:19.771549 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 13 04:02:19 crc kubenswrapper[5070]: I1213 04:02:19.798025 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 13 04:02:19 crc kubenswrapper[5070]: I1213 04:02:19.849808 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-db-create-7ncs9"] Dec 13 04:02:20 crc kubenswrapper[5070]: I1213 04:02:20.128668 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-64b5b8c565-xr2cz"] Dec 13 04:02:20 crc kubenswrapper[5070]: I1213 04:02:20.373174 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-backup-0" event={"ID":"21e7cf18-aaa7-458d-b7fa-e1084b6d7e4b","Type":"ContainerStarted","Data":"13ba7cfe992e251b166545022698954e62fe6c9b779ad626f224c9cca37c0146"} Dec 13 04:02:20 crc kubenswrapper[5070]: I1213 04:02:20.374905 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-64b5b8c565-xr2cz" event={"ID":"b6951934-d07e-4e5a-930c-779502792de9","Type":"ContainerStarted","Data":"43bb0bf2405a3d6fc96629b7d8f0949af48b3dd6ff260f238a9a64c178f9513a"} Dec 13 04:02:20 crc kubenswrapper[5070]: I1213 04:02:20.377062 5070 generic.go:334] "Generic (PLEG): container finished" podID="b1e14e43-fceb-409e-8d3b-9e0435a19506" containerID="d25d1774b928abc9074a4dc3428df243c34c6814f93634963b68396493ab5ea6" exitCode=0 Dec 13 04:02:20 crc kubenswrapper[5070]: I1213 04:02:20.377197 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-create-7ncs9" event={"ID":"b1e14e43-fceb-409e-8d3b-9e0435a19506","Type":"ContainerDied","Data":"d25d1774b928abc9074a4dc3428df243c34c6814f93634963b68396493ab5ea6"} Dec 13 04:02:20 crc kubenswrapper[5070]: I1213 04:02:20.377331 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-create-7ncs9" event={"ID":"b1e14e43-fceb-409e-8d3b-9e0435a19506","Type":"ContainerStarted","Data":"c5732e3ccdca1779af6b3776798e5f02d516b97911c5147e701051722d2be719"} Dec 13 04:02:20 crc kubenswrapper[5070]: I1213 04:02:20.494359 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-784865f99f-5r8dj"] Dec 13 04:02:20 crc kubenswrapper[5070]: I1213 04:02:20.704759 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openstack/cinder-volume-volume1-0"] Dec 13 04:02:20 crc kubenswrapper[5070]: W1213 04:02:20.710197 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8fd14d72_1dc4_4ab9_8b92_27e740e7eada.slice/crio-c0adaff751597cbdf59ae4face34ae2ff51407ee74d73267f38bee87f5a4b033 WatchSource:0}: Error finding container c0adaff751597cbdf59ae4face34ae2ff51407ee74d73267f38bee87f5a4b033: Status 404 returned error can't find the container with id c0adaff751597cbdf59ae4face34ae2ff51407ee74d73267f38bee87f5a4b033 Dec 13 04:02:20 crc kubenswrapper[5070]: I1213 04:02:20.788528 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 13 04:02:20 crc kubenswrapper[5070]: W1213 04:02:20.794363 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3fbb90b2_2bcf_4b11_a65a_8ac1bd09462d.slice/crio-10ebe54a34b18ac657a16f15b26788c89f82109e063c71fdc503115555fb2c7e WatchSource:0}: Error finding container 10ebe54a34b18ac657a16f15b26788c89f82109e063c71fdc503115555fb2c7e: Status 404 returned error can't find the container with id 10ebe54a34b18ac657a16f15b26788c89f82109e063c71fdc503115555fb2c7e Dec 13 04:02:21 crc kubenswrapper[5070]: I1213 04:02:21.409915 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d","Type":"ContainerStarted","Data":"10ebe54a34b18ac657a16f15b26788c89f82109e063c71fdc503115555fb2c7e"} Dec 13 04:02:21 crc kubenswrapper[5070]: I1213 04:02:21.416564 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-backup-0" event={"ID":"21e7cf18-aaa7-458d-b7fa-e1084b6d7e4b","Type":"ContainerStarted","Data":"407839850b339f46a67afff48e8280d8dbf8d81867c16c4759192b7650e1f4b7"} Dec 13 04:02:21 crc kubenswrapper[5070]: I1213 04:02:21.416614 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-backup-0" event={"ID":"21e7cf18-aaa7-458d-b7fa-e1084b6d7e4b","Type":"ContainerStarted","Data":"a59e28f49d9f4f7851447e1b9d1ff7925f422a9510bd3eaf12c4ebc462fcacd6"} Dec 13 04:02:21 crc kubenswrapper[5070]: I1213 04:02:21.425389 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-784865f99f-5r8dj" event={"ID":"904c7826-4376-4e6d-a520-a12e30a3495d","Type":"ContainerStarted","Data":"49ae469fb515ab561a97ee59ece123c834629a033dc446309bc22f85e049ee1e"} Dec 13 04:02:21 crc kubenswrapper[5070]: I1213 04:02:21.430879 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-volume1-0" event={"ID":"8fd14d72-1dc4-4ab9-8b92-27e740e7eada","Type":"ContainerStarted","Data":"c0adaff751597cbdf59ae4face34ae2ff51407ee74d73267f38bee87f5a4b033"} Dec 13 04:02:21 crc kubenswrapper[5070]: I1213 04:02:21.459549 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-backup-0" podStartSLOduration=2.4865482930000002 podStartE2EDuration="3.459523028s" podCreationTimestamp="2025-12-13 04:02:18 +0000 UTC" firstStartedPulling="2025-12-13 04:02:19.709874616 +0000 UTC m=+3031.945718162" lastFinishedPulling="2025-12-13 04:02:20.682849351 +0000 UTC m=+3032.918692897" observedRunningTime="2025-12-13 04:02:21.442786702 +0000 UTC m=+3033.678630248" watchObservedRunningTime="2025-12-13 04:02:21.459523028 +0000 UTC m=+3033.695366584" Dec 13 04:02:21 crc kubenswrapper[5070]: I1213 04:02:21.482751 5070 kubelet.go:2428] "SyncLoop UPDATE" 
source="api" pods=["openstack/glance-default-external-api-0"] Dec 13 04:02:21 crc kubenswrapper[5070]: I1213 04:02:21.949079 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-create-7ncs9" Dec 13 04:02:22 crc kubenswrapper[5070]: I1213 04:02:22.032486 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m72ml\" (UniqueName: \"kubernetes.io/projected/b1e14e43-fceb-409e-8d3b-9e0435a19506-kube-api-access-m72ml\") pod \"b1e14e43-fceb-409e-8d3b-9e0435a19506\" (UID: \"b1e14e43-fceb-409e-8d3b-9e0435a19506\") " Dec 13 04:02:22 crc kubenswrapper[5070]: I1213 04:02:22.050458 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b1e14e43-fceb-409e-8d3b-9e0435a19506-kube-api-access-m72ml" (OuterVolumeSpecName: "kube-api-access-m72ml") pod "b1e14e43-fceb-409e-8d3b-9e0435a19506" (UID: "b1e14e43-fceb-409e-8d3b-9e0435a19506"). InnerVolumeSpecName "kube-api-access-m72ml". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 04:02:22 crc kubenswrapper[5070]: I1213 04:02:22.135748 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m72ml\" (UniqueName: \"kubernetes.io/projected/b1e14e43-fceb-409e-8d3b-9e0435a19506-kube-api-access-m72ml\") on node \"crc\" DevicePath \"\"" Dec 13 04:02:22 crc kubenswrapper[5070]: I1213 04:02:22.334854 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-64b5b8c565-xr2cz"] Dec 13 04:02:22 crc kubenswrapper[5070]: I1213 04:02:22.392056 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-677fbcc968-jlxpr"] Dec 13 04:02:22 crc kubenswrapper[5070]: E1213 04:02:22.393178 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b1e14e43-fceb-409e-8d3b-9e0435a19506" containerName="mariadb-database-create" Dec 13 04:02:22 crc kubenswrapper[5070]: I1213 04:02:22.393198 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="b1e14e43-fceb-409e-8d3b-9e0435a19506" containerName="mariadb-database-create" Dec 13 04:02:22 crc kubenswrapper[5070]: I1213 04:02:22.393720 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="b1e14e43-fceb-409e-8d3b-9e0435a19506" containerName="mariadb-database-create" Dec 13 04:02:22 crc kubenswrapper[5070]: I1213 04:02:22.406621 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-677fbcc968-jlxpr" Dec 13 04:02:22 crc kubenswrapper[5070]: I1213 04:02:22.422615 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-horizon-svc" Dec 13 04:02:22 crc kubenswrapper[5070]: I1213 04:02:22.437073 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 13 04:02:22 crc kubenswrapper[5070]: I1213 04:02:22.468504 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-677fbcc968-jlxpr"] Dec 13 04:02:22 crc kubenswrapper[5070]: I1213 04:02:22.477325 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-create-7ncs9" event={"ID":"b1e14e43-fceb-409e-8d3b-9e0435a19506","Type":"ContainerDied","Data":"c5732e3ccdca1779af6b3776798e5f02d516b97911c5147e701051722d2be719"} Dec 13 04:02:22 crc kubenswrapper[5070]: I1213 04:02:22.477367 5070 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c5732e3ccdca1779af6b3776798e5f02d516b97911c5147e701051722d2be719" Dec 13 04:02:22 crc kubenswrapper[5070]: I1213 04:02:22.477430 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-create-7ncs9" Dec 13 04:02:22 crc kubenswrapper[5070]: I1213 04:02:22.478499 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0f20c4db-2943-474f-970d-02d52f185c6e-combined-ca-bundle\") pod \"horizon-677fbcc968-jlxpr\" (UID: \"0f20c4db-2943-474f-970d-02d52f185c6e\") " pod="openstack/horizon-677fbcc968-jlxpr" Dec 13 04:02:22 crc kubenswrapper[5070]: I1213 04:02:22.478633 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/0f20c4db-2943-474f-970d-02d52f185c6e-config-data\") pod \"horizon-677fbcc968-jlxpr\" (UID: \"0f20c4db-2943-474f-970d-02d52f185c6e\") " pod="openstack/horizon-677fbcc968-jlxpr" Dec 13 04:02:22 crc kubenswrapper[5070]: I1213 04:02:22.478669 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/0f20c4db-2943-474f-970d-02d52f185c6e-horizon-tls-certs\") pod \"horizon-677fbcc968-jlxpr\" (UID: \"0f20c4db-2943-474f-970d-02d52f185c6e\") " pod="openstack/horizon-677fbcc968-jlxpr" Dec 13 04:02:22 crc kubenswrapper[5070]: I1213 04:02:22.478689 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/0f20c4db-2943-474f-970d-02d52f185c6e-horizon-secret-key\") pod \"horizon-677fbcc968-jlxpr\" (UID: \"0f20c4db-2943-474f-970d-02d52f185c6e\") " pod="openstack/horizon-677fbcc968-jlxpr" Dec 13 04:02:22 crc kubenswrapper[5070]: I1213 04:02:22.479514 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l9c8n\" (UniqueName: \"kubernetes.io/projected/0f20c4db-2943-474f-970d-02d52f185c6e-kube-api-access-l9c8n\") pod \"horizon-677fbcc968-jlxpr\" (UID: \"0f20c4db-2943-474f-970d-02d52f185c6e\") " pod="openstack/horizon-677fbcc968-jlxpr" Dec 13 04:02:22 crc kubenswrapper[5070]: I1213 04:02:22.479726 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0f20c4db-2943-474f-970d-02d52f185c6e-scripts\") pod 
\"horizon-677fbcc968-jlxpr\" (UID: \"0f20c4db-2943-474f-970d-02d52f185c6e\") " pod="openstack/horizon-677fbcc968-jlxpr" Dec 13 04:02:22 crc kubenswrapper[5070]: I1213 04:02:22.479762 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0f20c4db-2943-474f-970d-02d52f185c6e-logs\") pod \"horizon-677fbcc968-jlxpr\" (UID: \"0f20c4db-2943-474f-970d-02d52f185c6e\") " pod="openstack/horizon-677fbcc968-jlxpr" Dec 13 04:02:22 crc kubenswrapper[5070]: I1213 04:02:22.479968 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"89ec3336-0c2e-4377-9cb8-fd3ded3af7d0","Type":"ContainerStarted","Data":"6a1ebedbab166c210ab2bc2d3ee8c08ba7728c684acae6d46152b7b1fac0a6bc"} Dec 13 04:02:22 crc kubenswrapper[5070]: I1213 04:02:22.481648 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d","Type":"ContainerStarted","Data":"42467443eb8771f7f71de6c980dc5ab5436bd0777a774652a08b1efc7990dd84"} Dec 13 04:02:22 crc kubenswrapper[5070]: I1213 04:02:22.497154 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-784865f99f-5r8dj"] Dec 13 04:02:22 crc kubenswrapper[5070]: I1213 04:02:22.535358 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-8655d596d8-lz82d"] Dec 13 04:02:22 crc kubenswrapper[5070]: I1213 04:02:22.537007 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-8655d596d8-lz82d" Dec 13 04:02:22 crc kubenswrapper[5070]: I1213 04:02:22.573070 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 13 04:02:22 crc kubenswrapper[5070]: I1213 04:02:22.582180 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0f20c4db-2943-474f-970d-02d52f185c6e-scripts\") pod \"horizon-677fbcc968-jlxpr\" (UID: \"0f20c4db-2943-474f-970d-02d52f185c6e\") " pod="openstack/horizon-677fbcc968-jlxpr" Dec 13 04:02:22 crc kubenswrapper[5070]: I1213 04:02:22.582251 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0f20c4db-2943-474f-970d-02d52f185c6e-logs\") pod \"horizon-677fbcc968-jlxpr\" (UID: \"0f20c4db-2943-474f-970d-02d52f185c6e\") " pod="openstack/horizon-677fbcc968-jlxpr" Dec 13 04:02:22 crc kubenswrapper[5070]: I1213 04:02:22.582362 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/c1f8d0b8-5e4f-4237-a0f3-fa38d0c2bfde-horizon-secret-key\") pod \"horizon-8655d596d8-lz82d\" (UID: \"c1f8d0b8-5e4f-4237-a0f3-fa38d0c2bfde\") " pod="openstack/horizon-8655d596d8-lz82d" Dec 13 04:02:22 crc kubenswrapper[5070]: I1213 04:02:22.582416 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0f20c4db-2943-474f-970d-02d52f185c6e-combined-ca-bundle\") pod \"horizon-677fbcc968-jlxpr\" (UID: \"0f20c4db-2943-474f-970d-02d52f185c6e\") " pod="openstack/horizon-677fbcc968-jlxpr" Dec 13 04:02:22 crc kubenswrapper[5070]: I1213 04:02:22.582463 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/c1f8d0b8-5e4f-4237-a0f3-fa38d0c2bfde-horizon-tls-certs\") pod \"horizon-8655d596d8-lz82d\" (UID: \"c1f8d0b8-5e4f-4237-a0f3-fa38d0c2bfde\") " pod="openstack/horizon-8655d596d8-lz82d" Dec 13 04:02:22 crc kubenswrapper[5070]: I1213 04:02:22.582527 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c1f8d0b8-5e4f-4237-a0f3-fa38d0c2bfde-logs\") pod \"horizon-8655d596d8-lz82d\" (UID: \"c1f8d0b8-5e4f-4237-a0f3-fa38d0c2bfde\") " pod="openstack/horizon-8655d596d8-lz82d" Dec 13 04:02:22 crc kubenswrapper[5070]: I1213 04:02:22.582588 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/0f20c4db-2943-474f-970d-02d52f185c6e-config-data\") pod \"horizon-677fbcc968-jlxpr\" (UID: \"0f20c4db-2943-474f-970d-02d52f185c6e\") " pod="openstack/horizon-677fbcc968-jlxpr" Dec 13 04:02:22 crc kubenswrapper[5070]: I1213 04:02:22.582616 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/0f20c4db-2943-474f-970d-02d52f185c6e-horizon-tls-certs\") pod \"horizon-677fbcc968-jlxpr\" (UID: \"0f20c4db-2943-474f-970d-02d52f185c6e\") " pod="openstack/horizon-677fbcc968-jlxpr" Dec 13 04:02:22 crc kubenswrapper[5070]: I1213 04:02:22.582641 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c1f8d0b8-5e4f-4237-a0f3-fa38d0c2bfde-combined-ca-bundle\") pod \"horizon-8655d596d8-lz82d\" (UID: \"c1f8d0b8-5e4f-4237-a0f3-fa38d0c2bfde\") " pod="openstack/horizon-8655d596d8-lz82d" Dec 13 04:02:22 crc kubenswrapper[5070]: I1213 04:02:22.582663 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/0f20c4db-2943-474f-970d-02d52f185c6e-horizon-secret-key\") pod \"horizon-677fbcc968-jlxpr\" (UID: \"0f20c4db-2943-474f-970d-02d52f185c6e\") " pod="openstack/horizon-677fbcc968-jlxpr" Dec 13 04:02:22 crc kubenswrapper[5070]: I1213 04:02:22.582711 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l9c8n\" (UniqueName: \"kubernetes.io/projected/0f20c4db-2943-474f-970d-02d52f185c6e-kube-api-access-l9c8n\") pod \"horizon-677fbcc968-jlxpr\" (UID: \"0f20c4db-2943-474f-970d-02d52f185c6e\") " pod="openstack/horizon-677fbcc968-jlxpr" Dec 13 04:02:22 crc kubenswrapper[5070]: I1213 04:02:22.582736 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c1f8d0b8-5e4f-4237-a0f3-fa38d0c2bfde-scripts\") pod \"horizon-8655d596d8-lz82d\" (UID: \"c1f8d0b8-5e4f-4237-a0f3-fa38d0c2bfde\") " pod="openstack/horizon-8655d596d8-lz82d" Dec 13 04:02:22 crc kubenswrapper[5070]: I1213 04:02:22.582854 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c1f8d0b8-5e4f-4237-a0f3-fa38d0c2bfde-config-data\") pod \"horizon-8655d596d8-lz82d\" (UID: \"c1f8d0b8-5e4f-4237-a0f3-fa38d0c2bfde\") " pod="openstack/horizon-8655d596d8-lz82d" Dec 13 04:02:22 crc kubenswrapper[5070]: I1213 04:02:22.582920 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9n5tv\" (UniqueName: 
\"kubernetes.io/projected/c1f8d0b8-5e4f-4237-a0f3-fa38d0c2bfde-kube-api-access-9n5tv\") pod \"horizon-8655d596d8-lz82d\" (UID: \"c1f8d0b8-5e4f-4237-a0f3-fa38d0c2bfde\") " pod="openstack/horizon-8655d596d8-lz82d" Dec 13 04:02:22 crc kubenswrapper[5070]: I1213 04:02:22.583189 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0f20c4db-2943-474f-970d-02d52f185c6e-scripts\") pod \"horizon-677fbcc968-jlxpr\" (UID: \"0f20c4db-2943-474f-970d-02d52f185c6e\") " pod="openstack/horizon-677fbcc968-jlxpr" Dec 13 04:02:22 crc kubenswrapper[5070]: I1213 04:02:22.583396 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0f20c4db-2943-474f-970d-02d52f185c6e-logs\") pod \"horizon-677fbcc968-jlxpr\" (UID: \"0f20c4db-2943-474f-970d-02d52f185c6e\") " pod="openstack/horizon-677fbcc968-jlxpr" Dec 13 04:02:22 crc kubenswrapper[5070]: I1213 04:02:22.584819 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/0f20c4db-2943-474f-970d-02d52f185c6e-config-data\") pod \"horizon-677fbcc968-jlxpr\" (UID: \"0f20c4db-2943-474f-970d-02d52f185c6e\") " pod="openstack/horizon-677fbcc968-jlxpr" Dec 13 04:02:22 crc kubenswrapper[5070]: I1213 04:02:22.587062 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0f20c4db-2943-474f-970d-02d52f185c6e-combined-ca-bundle\") pod \"horizon-677fbcc968-jlxpr\" (UID: \"0f20c4db-2943-474f-970d-02d52f185c6e\") " pod="openstack/horizon-677fbcc968-jlxpr" Dec 13 04:02:22 crc kubenswrapper[5070]: I1213 04:02:22.589814 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-8655d596d8-lz82d"] Dec 13 04:02:22 crc kubenswrapper[5070]: I1213 04:02:22.591432 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/0f20c4db-2943-474f-970d-02d52f185c6e-horizon-tls-certs\") pod \"horizon-677fbcc968-jlxpr\" (UID: \"0f20c4db-2943-474f-970d-02d52f185c6e\") " pod="openstack/horizon-677fbcc968-jlxpr" Dec 13 04:02:22 crc kubenswrapper[5070]: I1213 04:02:22.593643 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/0f20c4db-2943-474f-970d-02d52f185c6e-horizon-secret-key\") pod \"horizon-677fbcc968-jlxpr\" (UID: \"0f20c4db-2943-474f-970d-02d52f185c6e\") " pod="openstack/horizon-677fbcc968-jlxpr" Dec 13 04:02:22 crc kubenswrapper[5070]: I1213 04:02:22.612012 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l9c8n\" (UniqueName: \"kubernetes.io/projected/0f20c4db-2943-474f-970d-02d52f185c6e-kube-api-access-l9c8n\") pod \"horizon-677fbcc968-jlxpr\" (UID: \"0f20c4db-2943-474f-970d-02d52f185c6e\") " pod="openstack/horizon-677fbcc968-jlxpr" Dec 13 04:02:22 crc kubenswrapper[5070]: I1213 04:02:22.684754 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c1f8d0b8-5e4f-4237-a0f3-fa38d0c2bfde-logs\") pod \"horizon-8655d596d8-lz82d\" (UID: \"c1f8d0b8-5e4f-4237-a0f3-fa38d0c2bfde\") " pod="openstack/horizon-8655d596d8-lz82d" Dec 13 04:02:22 crc kubenswrapper[5070]: I1213 04:02:22.684842 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/c1f8d0b8-5e4f-4237-a0f3-fa38d0c2bfde-combined-ca-bundle\") pod \"horizon-8655d596d8-lz82d\" (UID: \"c1f8d0b8-5e4f-4237-a0f3-fa38d0c2bfde\") " pod="openstack/horizon-8655d596d8-lz82d" Dec 13 04:02:22 crc kubenswrapper[5070]: I1213 04:02:22.684901 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c1f8d0b8-5e4f-4237-a0f3-fa38d0c2bfde-scripts\") pod \"horizon-8655d596d8-lz82d\" (UID: \"c1f8d0b8-5e4f-4237-a0f3-fa38d0c2bfde\") " pod="openstack/horizon-8655d596d8-lz82d" Dec 13 04:02:22 crc kubenswrapper[5070]: I1213 04:02:22.684952 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c1f8d0b8-5e4f-4237-a0f3-fa38d0c2bfde-config-data\") pod \"horizon-8655d596d8-lz82d\" (UID: \"c1f8d0b8-5e4f-4237-a0f3-fa38d0c2bfde\") " pod="openstack/horizon-8655d596d8-lz82d" Dec 13 04:02:22 crc kubenswrapper[5070]: I1213 04:02:22.684996 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9n5tv\" (UniqueName: \"kubernetes.io/projected/c1f8d0b8-5e4f-4237-a0f3-fa38d0c2bfde-kube-api-access-9n5tv\") pod \"horizon-8655d596d8-lz82d\" (UID: \"c1f8d0b8-5e4f-4237-a0f3-fa38d0c2bfde\") " pod="openstack/horizon-8655d596d8-lz82d" Dec 13 04:02:22 crc kubenswrapper[5070]: I1213 04:02:22.685088 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/c1f8d0b8-5e4f-4237-a0f3-fa38d0c2bfde-horizon-secret-key\") pod \"horizon-8655d596d8-lz82d\" (UID: \"c1f8d0b8-5e4f-4237-a0f3-fa38d0c2bfde\") " pod="openstack/horizon-8655d596d8-lz82d" Dec 13 04:02:22 crc kubenswrapper[5070]: I1213 04:02:22.685140 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/c1f8d0b8-5e4f-4237-a0f3-fa38d0c2bfde-horizon-tls-certs\") pod \"horizon-8655d596d8-lz82d\" (UID: \"c1f8d0b8-5e4f-4237-a0f3-fa38d0c2bfde\") " pod="openstack/horizon-8655d596d8-lz82d" Dec 13 04:02:22 crc kubenswrapper[5070]: I1213 04:02:22.686449 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c1f8d0b8-5e4f-4237-a0f3-fa38d0c2bfde-logs\") pod \"horizon-8655d596d8-lz82d\" (UID: \"c1f8d0b8-5e4f-4237-a0f3-fa38d0c2bfde\") " pod="openstack/horizon-8655d596d8-lz82d" Dec 13 04:02:22 crc kubenswrapper[5070]: I1213 04:02:22.688555 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c1f8d0b8-5e4f-4237-a0f3-fa38d0c2bfde-scripts\") pod \"horizon-8655d596d8-lz82d\" (UID: \"c1f8d0b8-5e4f-4237-a0f3-fa38d0c2bfde\") " pod="openstack/horizon-8655d596d8-lz82d" Dec 13 04:02:22 crc kubenswrapper[5070]: I1213 04:02:22.688833 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c1f8d0b8-5e4f-4237-a0f3-fa38d0c2bfde-config-data\") pod \"horizon-8655d596d8-lz82d\" (UID: \"c1f8d0b8-5e4f-4237-a0f3-fa38d0c2bfde\") " pod="openstack/horizon-8655d596d8-lz82d" Dec 13 04:02:22 crc kubenswrapper[5070]: I1213 04:02:22.689327 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/c1f8d0b8-5e4f-4237-a0f3-fa38d0c2bfde-horizon-tls-certs\") pod \"horizon-8655d596d8-lz82d\" (UID: \"c1f8d0b8-5e4f-4237-a0f3-fa38d0c2bfde\") " 
pod="openstack/horizon-8655d596d8-lz82d" Dec 13 04:02:22 crc kubenswrapper[5070]: I1213 04:02:22.696113 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/c1f8d0b8-5e4f-4237-a0f3-fa38d0c2bfde-horizon-secret-key\") pod \"horizon-8655d596d8-lz82d\" (UID: \"c1f8d0b8-5e4f-4237-a0f3-fa38d0c2bfde\") " pod="openstack/horizon-8655d596d8-lz82d" Dec 13 04:02:22 crc kubenswrapper[5070]: I1213 04:02:22.696272 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c1f8d0b8-5e4f-4237-a0f3-fa38d0c2bfde-combined-ca-bundle\") pod \"horizon-8655d596d8-lz82d\" (UID: \"c1f8d0b8-5e4f-4237-a0f3-fa38d0c2bfde\") " pod="openstack/horizon-8655d596d8-lz82d" Dec 13 04:02:22 crc kubenswrapper[5070]: I1213 04:02:22.715633 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9n5tv\" (UniqueName: \"kubernetes.io/projected/c1f8d0b8-5e4f-4237-a0f3-fa38d0c2bfde-kube-api-access-9n5tv\") pod \"horizon-8655d596d8-lz82d\" (UID: \"c1f8d0b8-5e4f-4237-a0f3-fa38d0c2bfde\") " pod="openstack/horizon-8655d596d8-lz82d" Dec 13 04:02:22 crc kubenswrapper[5070]: I1213 04:02:22.792303 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-677fbcc968-jlxpr" Dec 13 04:02:22 crc kubenswrapper[5070]: I1213 04:02:22.912560 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-8655d596d8-lz82d" Dec 13 04:02:23 crc kubenswrapper[5070]: I1213 04:02:23.350171 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-677fbcc968-jlxpr"] Dec 13 04:02:23 crc kubenswrapper[5070]: W1213 04:02:23.361941 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0f20c4db_2943_474f_970d_02d52f185c6e.slice/crio-f469d6fe4621c2131ae6b361161ce0367bc8a0c37603b9e96b4d02edc092eac2 WatchSource:0}: Error finding container f469d6fe4621c2131ae6b361161ce0367bc8a0c37603b9e96b4d02edc092eac2: Status 404 returned error can't find the container with id f469d6fe4621c2131ae6b361161ce0367bc8a0c37603b9e96b4d02edc092eac2 Dec 13 04:02:23 crc kubenswrapper[5070]: I1213 04:02:23.476398 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-8655d596d8-lz82d"] Dec 13 04:02:23 crc kubenswrapper[5070]: W1213 04:02:23.480327 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc1f8d0b8_5e4f_4237_a0f3_fa38d0c2bfde.slice/crio-cabe6c21f820895e4f76330d10284c1e5aafa3450b2d892a5ca6aebd01c17916 WatchSource:0}: Error finding container cabe6c21f820895e4f76330d10284c1e5aafa3450b2d892a5ca6aebd01c17916: Status 404 returned error can't find the container with id cabe6c21f820895e4f76330d10284c1e5aafa3450b2d892a5ca6aebd01c17916 Dec 13 04:02:23 crc kubenswrapper[5070]: I1213 04:02:23.511988 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-8655d596d8-lz82d" event={"ID":"c1f8d0b8-5e4f-4237-a0f3-fa38d0c2bfde","Type":"ContainerStarted","Data":"cabe6c21f820895e4f76330d10284c1e5aafa3450b2d892a5ca6aebd01c17916"} Dec 13 04:02:23 crc kubenswrapper[5070]: I1213 04:02:23.517366 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" 
event={"ID":"89ec3336-0c2e-4377-9cb8-fd3ded3af7d0","Type":"ContainerStarted","Data":"8cec328d07c4b141cf34c48203d3d85a498a21e4dcacb16659af39bbd950ebb3"} Dec 13 04:02:23 crc kubenswrapper[5070]: I1213 04:02:23.517416 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"89ec3336-0c2e-4377-9cb8-fd3ded3af7d0","Type":"ContainerStarted","Data":"b34f4c7e2430e3b7243a9d0049351403ed14a021418af5119269e3598f4c5a2a"} Dec 13 04:02:23 crc kubenswrapper[5070]: I1213 04:02:23.517584 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="89ec3336-0c2e-4377-9cb8-fd3ded3af7d0" containerName="glance-log" containerID="cri-o://b34f4c7e2430e3b7243a9d0049351403ed14a021418af5119269e3598f4c5a2a" gracePeriod=30 Dec 13 04:02:23 crc kubenswrapper[5070]: I1213 04:02:23.517666 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="89ec3336-0c2e-4377-9cb8-fd3ded3af7d0" containerName="glance-httpd" containerID="cri-o://8cec328d07c4b141cf34c48203d3d85a498a21e4dcacb16659af39bbd950ebb3" gracePeriod=30 Dec 13 04:02:23 crc kubenswrapper[5070]: I1213 04:02:23.525699 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-677fbcc968-jlxpr" event={"ID":"0f20c4db-2943-474f-970d-02d52f185c6e","Type":"ContainerStarted","Data":"f469d6fe4621c2131ae6b361161ce0367bc8a0c37603b9e96b4d02edc092eac2"} Dec 13 04:02:23 crc kubenswrapper[5070]: I1213 04:02:23.530839 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d","Type":"ContainerStarted","Data":"99b6decb20f2243abe93558e7599bc237b8ceeeba7e952cb4942c436e37a394a"} Dec 13 04:02:23 crc kubenswrapper[5070]: I1213 04:02:23.531020 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d" containerName="glance-log" containerID="cri-o://42467443eb8771f7f71de6c980dc5ab5436bd0777a774652a08b1efc7990dd84" gracePeriod=30 Dec 13 04:02:23 crc kubenswrapper[5070]: I1213 04:02:23.531293 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d" containerName="glance-httpd" containerID="cri-o://99b6decb20f2243abe93558e7599bc237b8ceeeba7e952cb4942c436e37a394a" gracePeriod=30 Dec 13 04:02:23 crc kubenswrapper[5070]: I1213 04:02:23.536606 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-volume1-0" event={"ID":"8fd14d72-1dc4-4ab9-8b92-27e740e7eada","Type":"ContainerStarted","Data":"f326230fc8444e834f888ccc65c260a05f516399119db58bf2e0a40a850edc48"} Dec 13 04:02:23 crc kubenswrapper[5070]: I1213 04:02:23.536643 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-volume1-0" event={"ID":"8fd14d72-1dc4-4ab9-8b92-27e740e7eada","Type":"ContainerStarted","Data":"19f6efee76883b2a51c6263def8765823f126279adc4e216fef618f2fa346e17"} Dec 13 04:02:23 crc kubenswrapper[5070]: I1213 04:02:23.550122 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=4.550083626 podStartE2EDuration="4.550083626s" podCreationTimestamp="2025-12-13 04:02:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 
+0000 UTC" observedRunningTime="2025-12-13 04:02:23.544583176 +0000 UTC m=+3035.780426742" watchObservedRunningTime="2025-12-13 04:02:23.550083626 +0000 UTC m=+3035.785927172" Dec 13 04:02:23 crc kubenswrapper[5070]: I1213 04:02:23.574860 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-volume-volume1-0" podStartSLOduration=3.635805549 podStartE2EDuration="5.57483098s" podCreationTimestamp="2025-12-13 04:02:18 +0000 UTC" firstStartedPulling="2025-12-13 04:02:20.722606956 +0000 UTC m=+3032.958450502" lastFinishedPulling="2025-12-13 04:02:22.661632387 +0000 UTC m=+3034.897475933" observedRunningTime="2025-12-13 04:02:23.568814916 +0000 UTC m=+3035.804658462" watchObservedRunningTime="2025-12-13 04:02:23.57483098 +0000 UTC m=+3035.810674556" Dec 13 04:02:23 crc kubenswrapper[5070]: I1213 04:02:23.599880 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=4.5998552329999995 podStartE2EDuration="4.599855233s" podCreationTimestamp="2025-12-13 04:02:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 04:02:23.59460493 +0000 UTC m=+3035.830448486" watchObservedRunningTime="2025-12-13 04:02:23.599855233 +0000 UTC m=+3035.835698779" Dec 13 04:02:23 crc kubenswrapper[5070]: I1213 04:02:23.880126 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-volume-volume1-0" Dec 13 04:02:23 crc kubenswrapper[5070]: I1213 04:02:23.916669 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-backup-0" Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.234824 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.342353 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.351318 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d-httpd-run\") pod \"3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d\" (UID: \"3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d\") " Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.351372 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/89ec3336-0c2e-4377-9cb8-fd3ded3af7d0-combined-ca-bundle\") pod \"89ec3336-0c2e-4377-9cb8-fd3ded3af7d0\" (UID: \"89ec3336-0c2e-4377-9cb8-fd3ded3af7d0\") " Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.351394 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sg5bd\" (UniqueName: \"kubernetes.io/projected/89ec3336-0c2e-4377-9cb8-fd3ded3af7d0-kube-api-access-sg5bd\") pod \"89ec3336-0c2e-4377-9cb8-fd3ded3af7d0\" (UID: \"89ec3336-0c2e-4377-9cb8-fd3ded3af7d0\") " Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.351410 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/89ec3336-0c2e-4377-9cb8-fd3ded3af7d0-logs\") pod \"89ec3336-0c2e-4377-9cb8-fd3ded3af7d0\" (UID: \"89ec3336-0c2e-4377-9cb8-fd3ded3af7d0\") " Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.351473 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d-combined-ca-bundle\") pod \"3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d\" (UID: \"3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d\") " Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.351491 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/89ec3336-0c2e-4377-9cb8-fd3ded3af7d0-config-data\") pod \"89ec3336-0c2e-4377-9cb8-fd3ded3af7d0\" (UID: \"89ec3336-0c2e-4377-9cb8-fd3ded3af7d0\") " Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.351508 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mngdv\" (UniqueName: \"kubernetes.io/projected/3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d-kube-api-access-mngdv\") pod \"3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d\" (UID: \"3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d\") " Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.351530 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d-logs\") pod \"3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d\" (UID: \"3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d\") " Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.351556 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/89ec3336-0c2e-4377-9cb8-fd3ded3af7d0-public-tls-certs\") pod \"89ec3336-0c2e-4377-9cb8-fd3ded3af7d0\" (UID: \"89ec3336-0c2e-4377-9cb8-fd3ded3af7d0\") " Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.351571 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d-scripts\") pod \"3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d\" (UID: 
\"3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d\") " Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.351604 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d\" (UID: \"3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d\") " Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.351624 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d-config-data\") pod \"3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d\" (UID: \"3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d\") " Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.351648 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d-internal-tls-certs\") pod \"3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d\" (UID: \"3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d\") " Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.351668 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/89ec3336-0c2e-4377-9cb8-fd3ded3af7d0-ceph\") pod \"89ec3336-0c2e-4377-9cb8-fd3ded3af7d0\" (UID: \"89ec3336-0c2e-4377-9cb8-fd3ded3af7d0\") " Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.351715 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d-ceph\") pod \"3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d\" (UID: \"3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d\") " Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.351745 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"89ec3336-0c2e-4377-9cb8-fd3ded3af7d0\" (UID: \"89ec3336-0c2e-4377-9cb8-fd3ded3af7d0\") " Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.351768 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/89ec3336-0c2e-4377-9cb8-fd3ded3af7d0-httpd-run\") pod \"89ec3336-0c2e-4377-9cb8-fd3ded3af7d0\" (UID: \"89ec3336-0c2e-4377-9cb8-fd3ded3af7d0\") " Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.351782 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/89ec3336-0c2e-4377-9cb8-fd3ded3af7d0-scripts\") pod \"89ec3336-0c2e-4377-9cb8-fd3ded3af7d0\" (UID: \"89ec3336-0c2e-4377-9cb8-fd3ded3af7d0\") " Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.358275 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/89ec3336-0c2e-4377-9cb8-fd3ded3af7d0-scripts" (OuterVolumeSpecName: "scripts") pod "89ec3336-0c2e-4377-9cb8-fd3ded3af7d0" (UID: "89ec3336-0c2e-4377-9cb8-fd3ded3af7d0"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.358481 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/89ec3336-0c2e-4377-9cb8-fd3ded3af7d0-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "89ec3336-0c2e-4377-9cb8-fd3ded3af7d0" (UID: "89ec3336-0c2e-4377-9cb8-fd3ded3af7d0"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.358850 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/89ec3336-0c2e-4377-9cb8-fd3ded3af7d0-logs" (OuterVolumeSpecName: "logs") pod "89ec3336-0c2e-4377-9cb8-fd3ded3af7d0" (UID: "89ec3336-0c2e-4377-9cb8-fd3ded3af7d0"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.360225 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d" (UID: "3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.360785 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d-logs" (OuterVolumeSpecName: "logs") pod "3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d" (UID: "3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.362576 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d-scripts" (OuterVolumeSpecName: "scripts") pod "3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d" (UID: "3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.369783 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/89ec3336-0c2e-4377-9cb8-fd3ded3af7d0-ceph" (OuterVolumeSpecName: "ceph") pod "89ec3336-0c2e-4377-9cb8-fd3ded3af7d0" (UID: "89ec3336-0c2e-4377-9cb8-fd3ded3af7d0"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.384262 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d-ceph" (OuterVolumeSpecName: "ceph") pod "3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d" (UID: "3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.398145 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage09-crc" (OuterVolumeSpecName: "glance") pod "89ec3336-0c2e-4377-9cb8-fd3ded3af7d0" (UID: "89ec3336-0c2e-4377-9cb8-fd3ded3af7d0"). InnerVolumeSpecName "local-storage09-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.398191 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d-kube-api-access-mngdv" (OuterVolumeSpecName: "kube-api-access-mngdv") pod "3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d" (UID: "3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d"). InnerVolumeSpecName "kube-api-access-mngdv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.398263 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/89ec3336-0c2e-4377-9cb8-fd3ded3af7d0-kube-api-access-sg5bd" (OuterVolumeSpecName: "kube-api-access-sg5bd") pod "89ec3336-0c2e-4377-9cb8-fd3ded3af7d0" (UID: "89ec3336-0c2e-4377-9cb8-fd3ded3af7d0"). InnerVolumeSpecName "kube-api-access-sg5bd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.412588 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage01-crc" (OuterVolumeSpecName: "glance") pod "3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d" (UID: "3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d"). InnerVolumeSpecName "local-storage01-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.416931 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/89ec3336-0c2e-4377-9cb8-fd3ded3af7d0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "89ec3336-0c2e-4377-9cb8-fd3ded3af7d0" (UID: "89ec3336-0c2e-4377-9cb8-fd3ded3af7d0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.454799 5070 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d-logs\") on node \"crc\" DevicePath \"\"" Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.454831 5070 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d-scripts\") on node \"crc\" DevicePath \"\"" Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.454857 5070 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" " Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.454870 5070 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/89ec3336-0c2e-4377-9cb8-fd3ded3af7d0-ceph\") on node \"crc\" DevicePath \"\"" Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.454886 5070 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d-ceph\") on node \"crc\" DevicePath \"\"" Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.454905 5070 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" " Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.454920 5070 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/89ec3336-0c2e-4377-9cb8-fd3ded3af7d0-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.454931 5070 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/89ec3336-0c2e-4377-9cb8-fd3ded3af7d0-scripts\") on node \"crc\" DevicePath \"\"" Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.454942 5070 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: 
\"kubernetes.io/empty-dir/3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.454953 5070 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/89ec3336-0c2e-4377-9cb8-fd3ded3af7d0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.454967 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sg5bd\" (UniqueName: \"kubernetes.io/projected/89ec3336-0c2e-4377-9cb8-fd3ded3af7d0-kube-api-access-sg5bd\") on node \"crc\" DevicePath \"\"" Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.454978 5070 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/89ec3336-0c2e-4377-9cb8-fd3ded3af7d0-logs\") on node \"crc\" DevicePath \"\"" Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.454989 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mngdv\" (UniqueName: \"kubernetes.io/projected/3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d-kube-api-access-mngdv\") on node \"crc\" DevicePath \"\"" Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.458333 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/89ec3336-0c2e-4377-9cb8-fd3ded3af7d0-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "89ec3336-0c2e-4377-9cb8-fd3ded3af7d0" (UID: "89ec3336-0c2e-4377-9cb8-fd3ded3af7d0"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.458614 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d" (UID: "3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.482539 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d-config-data" (OuterVolumeSpecName: "config-data") pod "3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d" (UID: "3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.483113 5070 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage01-crc" (UniqueName: "kubernetes.io/local-volume/local-storage01-crc") on node "crc" Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.498759 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d" (UID: "3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.511037 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/89ec3336-0c2e-4377-9cb8-fd3ded3af7d0-config-data" (OuterVolumeSpecName: "config-data") pod "89ec3336-0c2e-4377-9cb8-fd3ded3af7d0" (UID: "89ec3336-0c2e-4377-9cb8-fd3ded3af7d0"). 
InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.518641 5070 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage09-crc" (UniqueName: "kubernetes.io/local-volume/local-storage09-crc") on node "crc" Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.558574 5070 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/89ec3336-0c2e-4377-9cb8-fd3ded3af7d0-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.558610 5070 reconciler_common.go:293] "Volume detached for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" DevicePath \"\"" Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.558628 5070 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d-config-data\") on node \"crc\" DevicePath \"\"" Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.558639 5070 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.558648 5070 reconciler_common.go:293] "Volume detached for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" DevicePath \"\"" Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.558660 5070 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.558668 5070 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/89ec3336-0c2e-4377-9cb8-fd3ded3af7d0-config-data\") on node \"crc\" DevicePath \"\"" Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.570755 5070 generic.go:334] "Generic (PLEG): container finished" podID="3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d" containerID="99b6decb20f2243abe93558e7599bc237b8ceeeba7e952cb4942c436e37a394a" exitCode=0 Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.570795 5070 generic.go:334] "Generic (PLEG): container finished" podID="3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d" containerID="42467443eb8771f7f71de6c980dc5ab5436bd0777a774652a08b1efc7990dd84" exitCode=143 Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.570874 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d","Type":"ContainerDied","Data":"99b6decb20f2243abe93558e7599bc237b8ceeeba7e952cb4942c436e37a394a"} Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.570904 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d","Type":"ContainerDied","Data":"42467443eb8771f7f71de6c980dc5ab5436bd0777a774652a08b1efc7990dd84"} Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.570924 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" 
event={"ID":"3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d","Type":"ContainerDied","Data":"10ebe54a34b18ac657a16f15b26788c89f82109e063c71fdc503115555fb2c7e"} Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.570941 5070 scope.go:117] "RemoveContainer" containerID="99b6decb20f2243abe93558e7599bc237b8ceeeba7e952cb4942c436e37a394a" Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.571097 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.579264 5070 generic.go:334] "Generic (PLEG): container finished" podID="89ec3336-0c2e-4377-9cb8-fd3ded3af7d0" containerID="8cec328d07c4b141cf34c48203d3d85a498a21e4dcacb16659af39bbd950ebb3" exitCode=143 Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.579303 5070 generic.go:334] "Generic (PLEG): container finished" podID="89ec3336-0c2e-4377-9cb8-fd3ded3af7d0" containerID="b34f4c7e2430e3b7243a9d0049351403ed14a021418af5119269e3598f4c5a2a" exitCode=143 Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.579459 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"89ec3336-0c2e-4377-9cb8-fd3ded3af7d0","Type":"ContainerDied","Data":"8cec328d07c4b141cf34c48203d3d85a498a21e4dcacb16659af39bbd950ebb3"} Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.579509 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.579534 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"89ec3336-0c2e-4377-9cb8-fd3ded3af7d0","Type":"ContainerDied","Data":"b34f4c7e2430e3b7243a9d0049351403ed14a021418af5119269e3598f4c5a2a"} Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.579552 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"89ec3336-0c2e-4377-9cb8-fd3ded3af7d0","Type":"ContainerDied","Data":"6a1ebedbab166c210ab2bc2d3ee8c08ba7728c684acae6d46152b7b1fac0a6bc"} Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.621008 5070 scope.go:117] "RemoveContainer" containerID="42467443eb8771f7f71de6c980dc5ab5436bd0777a774652a08b1efc7990dd84" Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.628552 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.667528 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.693114 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 13 04:02:24 crc kubenswrapper[5070]: E1213 04:02:24.693524 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="89ec3336-0c2e-4377-9cb8-fd3ded3af7d0" containerName="glance-log" Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.693538 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="89ec3336-0c2e-4377-9cb8-fd3ded3af7d0" containerName="glance-log" Dec 13 04:02:24 crc kubenswrapper[5070]: E1213 04:02:24.693551 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d" containerName="glance-httpd" Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.693557 5070 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d" containerName="glance-httpd" Dec 13 04:02:24 crc kubenswrapper[5070]: E1213 04:02:24.693576 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d" containerName="glance-log" Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.693583 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d" containerName="glance-log" Dec 13 04:02:24 crc kubenswrapper[5070]: E1213 04:02:24.693596 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="89ec3336-0c2e-4377-9cb8-fd3ded3af7d0" containerName="glance-httpd" Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.693602 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="89ec3336-0c2e-4377-9cb8-fd3ded3af7d0" containerName="glance-httpd" Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.693769 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="89ec3336-0c2e-4377-9cb8-fd3ded3af7d0" containerName="glance-log" Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.693778 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="89ec3336-0c2e-4377-9cb8-fd3ded3af7d0" containerName="glance-httpd" Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.693785 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d" containerName="glance-log" Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.693799 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d" containerName="glance-httpd" Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.699048 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.703086 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.703358 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-7z77k" Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.703565 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.705171 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.729092 5070 scope.go:117] "RemoveContainer" containerID="99b6decb20f2243abe93558e7599bc237b8ceeeba7e952cb4942c436e37a394a" Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.730823 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 13 04:02:24 crc kubenswrapper[5070]: E1213 04:02:24.733007 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"99b6decb20f2243abe93558e7599bc237b8ceeeba7e952cb4942c436e37a394a\": container with ID starting with 99b6decb20f2243abe93558e7599bc237b8ceeeba7e952cb4942c436e37a394a not found: ID does not exist" containerID="99b6decb20f2243abe93558e7599bc237b8ceeeba7e952cb4942c436e37a394a" Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.733060 5070 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"99b6decb20f2243abe93558e7599bc237b8ceeeba7e952cb4942c436e37a394a"} err="failed to get container status \"99b6decb20f2243abe93558e7599bc237b8ceeeba7e952cb4942c436e37a394a\": rpc error: code = NotFound desc = could not find container \"99b6decb20f2243abe93558e7599bc237b8ceeeba7e952cb4942c436e37a394a\": container with ID starting with 99b6decb20f2243abe93558e7599bc237b8ceeeba7e952cb4942c436e37a394a not found: ID does not exist" Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.733087 5070 scope.go:117] "RemoveContainer" containerID="42467443eb8771f7f71de6c980dc5ab5436bd0777a774652a08b1efc7990dd84" Dec 13 04:02:24 crc kubenswrapper[5070]: E1213 04:02:24.733821 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"42467443eb8771f7f71de6c980dc5ab5436bd0777a774652a08b1efc7990dd84\": container with ID starting with 42467443eb8771f7f71de6c980dc5ab5436bd0777a774652a08b1efc7990dd84 not found: ID does not exist" containerID="42467443eb8771f7f71de6c980dc5ab5436bd0777a774652a08b1efc7990dd84" Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.733840 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"42467443eb8771f7f71de6c980dc5ab5436bd0777a774652a08b1efc7990dd84"} err="failed to get container status \"42467443eb8771f7f71de6c980dc5ab5436bd0777a774652a08b1efc7990dd84\": rpc error: code = NotFound desc = could not find container \"42467443eb8771f7f71de6c980dc5ab5436bd0777a774652a08b1efc7990dd84\": container with ID starting with 42467443eb8771f7f71de6c980dc5ab5436bd0777a774652a08b1efc7990dd84 not found: ID does not exist" Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.733854 5070 scope.go:117] "RemoveContainer" containerID="99b6decb20f2243abe93558e7599bc237b8ceeeba7e952cb4942c436e37a394a" Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.737186 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"99b6decb20f2243abe93558e7599bc237b8ceeeba7e952cb4942c436e37a394a"} err="failed to get container status \"99b6decb20f2243abe93558e7599bc237b8ceeeba7e952cb4942c436e37a394a\": rpc error: code = NotFound desc = could not find container \"99b6decb20f2243abe93558e7599bc237b8ceeeba7e952cb4942c436e37a394a\": container with ID starting with 99b6decb20f2243abe93558e7599bc237b8ceeeba7e952cb4942c436e37a394a not found: ID does not exist" Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.737231 5070 scope.go:117] "RemoveContainer" containerID="42467443eb8771f7f71de6c980dc5ab5436bd0777a774652a08b1efc7990dd84" Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.745146 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"42467443eb8771f7f71de6c980dc5ab5436bd0777a774652a08b1efc7990dd84"} err="failed to get container status \"42467443eb8771f7f71de6c980dc5ab5436bd0777a774652a08b1efc7990dd84\": rpc error: code = NotFound desc = could not find container \"42467443eb8771f7f71de6c980dc5ab5436bd0777a774652a08b1efc7990dd84\": container with ID starting with 42467443eb8771f7f71de6c980dc5ab5436bd0777a774652a08b1efc7990dd84 not found: ID does not exist" Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.745194 5070 scope.go:117] "RemoveContainer" containerID="8cec328d07c4b141cf34c48203d3d85a498a21e4dcacb16659af39bbd950ebb3" Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.766407 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openstack/glance-default-external-api-0"] Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.788279 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.801964 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.808109 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.812293 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.813397 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.818919 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.846853 5070 scope.go:117] "RemoveContainer" containerID="b34f4c7e2430e3b7243a9d0049351403ed14a021418af5119269e3598f4c5a2a" Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.871730 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/69a11b54-bec4-4a0e-b498-94747f0c3e37-scripts\") pod \"glance-default-internal-api-0\" (UID: \"69a11b54-bec4-4a0e-b498-94747f0c3e37\") " pod="openstack/glance-default-internal-api-0" Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.872023 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vr9nx\" (UniqueName: \"kubernetes.io/projected/69a11b54-bec4-4a0e-b498-94747f0c3e37-kube-api-access-vr9nx\") pod \"glance-default-internal-api-0\" (UID: \"69a11b54-bec4-4a0e-b498-94747f0c3e37\") " pod="openstack/glance-default-internal-api-0" Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.872062 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/69a11b54-bec4-4a0e-b498-94747f0c3e37-ceph\") pod \"glance-default-internal-api-0\" (UID: \"69a11b54-bec4-4a0e-b498-94747f0c3e37\") " pod="openstack/glance-default-internal-api-0" Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.872088 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/69a11b54-bec4-4a0e-b498-94747f0c3e37-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"69a11b54-bec4-4a0e-b498-94747f0c3e37\") " pod="openstack/glance-default-internal-api-0" Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.872105 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/69a11b54-bec4-4a0e-b498-94747f0c3e37-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"69a11b54-bec4-4a0e-b498-94747f0c3e37\") " pod="openstack/glance-default-internal-api-0" Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.872123 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/69a11b54-bec4-4a0e-b498-94747f0c3e37-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"69a11b54-bec4-4a0e-b498-94747f0c3e37\") " pod="openstack/glance-default-internal-api-0" Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.872207 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/69a11b54-bec4-4a0e-b498-94747f0c3e37-config-data\") pod \"glance-default-internal-api-0\" (UID: \"69a11b54-bec4-4a0e-b498-94747f0c3e37\") " pod="openstack/glance-default-internal-api-0" Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.872231 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-internal-api-0\" (UID: \"69a11b54-bec4-4a0e-b498-94747f0c3e37\") " pod="openstack/glance-default-internal-api-0" Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.872259 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/69a11b54-bec4-4a0e-b498-94747f0c3e37-logs\") pod \"glance-default-internal-api-0\" (UID: \"69a11b54-bec4-4a0e-b498-94747f0c3e37\") " pod="openstack/glance-default-internal-api-0" Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.906435 5070 scope.go:117] "RemoveContainer" containerID="8cec328d07c4b141cf34c48203d3d85a498a21e4dcacb16659af39bbd950ebb3" Dec 13 04:02:24 crc kubenswrapper[5070]: E1213 04:02:24.906794 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8cec328d07c4b141cf34c48203d3d85a498a21e4dcacb16659af39bbd950ebb3\": container with ID starting with 8cec328d07c4b141cf34c48203d3d85a498a21e4dcacb16659af39bbd950ebb3 not found: ID does not exist" containerID="8cec328d07c4b141cf34c48203d3d85a498a21e4dcacb16659af39bbd950ebb3" Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.906822 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8cec328d07c4b141cf34c48203d3d85a498a21e4dcacb16659af39bbd950ebb3"} err="failed to get container status \"8cec328d07c4b141cf34c48203d3d85a498a21e4dcacb16659af39bbd950ebb3\": rpc error: code = NotFound desc = could not find container \"8cec328d07c4b141cf34c48203d3d85a498a21e4dcacb16659af39bbd950ebb3\": container with ID starting with 8cec328d07c4b141cf34c48203d3d85a498a21e4dcacb16659af39bbd950ebb3 not found: ID does not exist" Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.906842 5070 scope.go:117] "RemoveContainer" containerID="b34f4c7e2430e3b7243a9d0049351403ed14a021418af5119269e3598f4c5a2a" Dec 13 04:02:24 crc kubenswrapper[5070]: E1213 04:02:24.907020 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b34f4c7e2430e3b7243a9d0049351403ed14a021418af5119269e3598f4c5a2a\": container with ID starting with b34f4c7e2430e3b7243a9d0049351403ed14a021418af5119269e3598f4c5a2a not found: ID does not exist" containerID="b34f4c7e2430e3b7243a9d0049351403ed14a021418af5119269e3598f4c5a2a" Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.907042 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b34f4c7e2430e3b7243a9d0049351403ed14a021418af5119269e3598f4c5a2a"} err="failed to get container status 
\"b34f4c7e2430e3b7243a9d0049351403ed14a021418af5119269e3598f4c5a2a\": rpc error: code = NotFound desc = could not find container \"b34f4c7e2430e3b7243a9d0049351403ed14a021418af5119269e3598f4c5a2a\": container with ID starting with b34f4c7e2430e3b7243a9d0049351403ed14a021418af5119269e3598f4c5a2a not found: ID does not exist" Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.907056 5070 scope.go:117] "RemoveContainer" containerID="8cec328d07c4b141cf34c48203d3d85a498a21e4dcacb16659af39bbd950ebb3" Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.907237 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8cec328d07c4b141cf34c48203d3d85a498a21e4dcacb16659af39bbd950ebb3"} err="failed to get container status \"8cec328d07c4b141cf34c48203d3d85a498a21e4dcacb16659af39bbd950ebb3\": rpc error: code = NotFound desc = could not find container \"8cec328d07c4b141cf34c48203d3d85a498a21e4dcacb16659af39bbd950ebb3\": container with ID starting with 8cec328d07c4b141cf34c48203d3d85a498a21e4dcacb16659af39bbd950ebb3 not found: ID does not exist" Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.907262 5070 scope.go:117] "RemoveContainer" containerID="b34f4c7e2430e3b7243a9d0049351403ed14a021418af5119269e3598f4c5a2a" Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.907602 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b34f4c7e2430e3b7243a9d0049351403ed14a021418af5119269e3598f4c5a2a"} err="failed to get container status \"b34f4c7e2430e3b7243a9d0049351403ed14a021418af5119269e3598f4c5a2a\": rpc error: code = NotFound desc = could not find container \"b34f4c7e2430e3b7243a9d0049351403ed14a021418af5119269e3598f4c5a2a\": container with ID starting with b34f4c7e2430e3b7243a9d0049351403ed14a021418af5119269e3598f4c5a2a not found: ID does not exist" Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.973738 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pmrtw\" (UniqueName: \"kubernetes.io/projected/48089201-da81-4de1-be8f-3a832ff67774-kube-api-access-pmrtw\") pod \"glance-default-external-api-0\" (UID: \"48089201-da81-4de1-be8f-3a832ff67774\") " pod="openstack/glance-default-external-api-0" Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.973811 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/48089201-da81-4de1-be8f-3a832ff67774-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"48089201-da81-4de1-be8f-3a832ff67774\") " pod="openstack/glance-default-external-api-0" Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.973839 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/48089201-da81-4de1-be8f-3a832ff67774-ceph\") pod \"glance-default-external-api-0\" (UID: \"48089201-da81-4de1-be8f-3a832ff67774\") " pod="openstack/glance-default-external-api-0" Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.973876 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/48089201-da81-4de1-be8f-3a832ff67774-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"48089201-da81-4de1-be8f-3a832ff67774\") " pod="openstack/glance-default-external-api-0" Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 
04:02:24.973891 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/48089201-da81-4de1-be8f-3a832ff67774-logs\") pod \"glance-default-external-api-0\" (UID: \"48089201-da81-4de1-be8f-3a832ff67774\") " pod="openstack/glance-default-external-api-0" Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.973915 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/69a11b54-bec4-4a0e-b498-94747f0c3e37-config-data\") pod \"glance-default-internal-api-0\" (UID: \"69a11b54-bec4-4a0e-b498-94747f0c3e37\") " pod="openstack/glance-default-internal-api-0" Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.973937 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-internal-api-0\" (UID: \"69a11b54-bec4-4a0e-b498-94747f0c3e37\") " pod="openstack/glance-default-internal-api-0" Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.973985 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/69a11b54-bec4-4a0e-b498-94747f0c3e37-logs\") pod \"glance-default-internal-api-0\" (UID: \"69a11b54-bec4-4a0e-b498-94747f0c3e37\") " pod="openstack/glance-default-internal-api-0" Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.974012 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/48089201-da81-4de1-be8f-3a832ff67774-config-data\") pod \"glance-default-external-api-0\" (UID: \"48089201-da81-4de1-be8f-3a832ff67774\") " pod="openstack/glance-default-external-api-0" Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.974053 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/69a11b54-bec4-4a0e-b498-94747f0c3e37-scripts\") pod \"glance-default-internal-api-0\" (UID: \"69a11b54-bec4-4a0e-b498-94747f0c3e37\") " pod="openstack/glance-default-internal-api-0" Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.974080 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vr9nx\" (UniqueName: \"kubernetes.io/projected/69a11b54-bec4-4a0e-b498-94747f0c3e37-kube-api-access-vr9nx\") pod \"glance-default-internal-api-0\" (UID: \"69a11b54-bec4-4a0e-b498-94747f0c3e37\") " pod="openstack/glance-default-internal-api-0" Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.974123 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/48089201-da81-4de1-be8f-3a832ff67774-scripts\") pod \"glance-default-external-api-0\" (UID: \"48089201-da81-4de1-be8f-3a832ff67774\") " pod="openstack/glance-default-external-api-0" Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.974151 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"48089201-da81-4de1-be8f-3a832ff67774\") " pod="openstack/glance-default-external-api-0" Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.974168 5070 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/69a11b54-bec4-4a0e-b498-94747f0c3e37-ceph\") pod \"glance-default-internal-api-0\" (UID: \"69a11b54-bec4-4a0e-b498-94747f0c3e37\") " pod="openstack/glance-default-internal-api-0" Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.974188 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/69a11b54-bec4-4a0e-b498-94747f0c3e37-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"69a11b54-bec4-4a0e-b498-94747f0c3e37\") " pod="openstack/glance-default-internal-api-0" Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.974207 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/48089201-da81-4de1-be8f-3a832ff67774-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"48089201-da81-4de1-be8f-3a832ff67774\") " pod="openstack/glance-default-external-api-0" Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.974224 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/69a11b54-bec4-4a0e-b498-94747f0c3e37-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"69a11b54-bec4-4a0e-b498-94747f0c3e37\") " pod="openstack/glance-default-internal-api-0" Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.974254 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/69a11b54-bec4-4a0e-b498-94747f0c3e37-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"69a11b54-bec4-4a0e-b498-94747f0c3e37\") " pod="openstack/glance-default-internal-api-0" Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.974388 5070 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-internal-api-0\" (UID: \"69a11b54-bec4-4a0e-b498-94747f0c3e37\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/glance-default-internal-api-0" Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.975283 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/69a11b54-bec4-4a0e-b498-94747f0c3e37-logs\") pod \"glance-default-internal-api-0\" (UID: \"69a11b54-bec4-4a0e-b498-94747f0c3e37\") " pod="openstack/glance-default-internal-api-0" Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.976202 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/69a11b54-bec4-4a0e-b498-94747f0c3e37-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"69a11b54-bec4-4a0e-b498-94747f0c3e37\") " pod="openstack/glance-default-internal-api-0" Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.981663 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/69a11b54-bec4-4a0e-b498-94747f0c3e37-config-data\") pod \"glance-default-internal-api-0\" (UID: \"69a11b54-bec4-4a0e-b498-94747f0c3e37\") " pod="openstack/glance-default-internal-api-0" Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.982654 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/69a11b54-bec4-4a0e-b498-94747f0c3e37-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"69a11b54-bec4-4a0e-b498-94747f0c3e37\") " pod="openstack/glance-default-internal-api-0" Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.991418 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/69a11b54-bec4-4a0e-b498-94747f0c3e37-scripts\") pod \"glance-default-internal-api-0\" (UID: \"69a11b54-bec4-4a0e-b498-94747f0c3e37\") " pod="openstack/glance-default-internal-api-0" Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.991657 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/69a11b54-bec4-4a0e-b498-94747f0c3e37-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"69a11b54-bec4-4a0e-b498-94747f0c3e37\") " pod="openstack/glance-default-internal-api-0" Dec 13 04:02:24 crc kubenswrapper[5070]: I1213 04:02:24.994023 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vr9nx\" (UniqueName: \"kubernetes.io/projected/69a11b54-bec4-4a0e-b498-94747f0c3e37-kube-api-access-vr9nx\") pod \"glance-default-internal-api-0\" (UID: \"69a11b54-bec4-4a0e-b498-94747f0c3e37\") " pod="openstack/glance-default-internal-api-0" Dec 13 04:02:25 crc kubenswrapper[5070]: I1213 04:02:25.004226 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/69a11b54-bec4-4a0e-b498-94747f0c3e37-ceph\") pod \"glance-default-internal-api-0\" (UID: \"69a11b54-bec4-4a0e-b498-94747f0c3e37\") " pod="openstack/glance-default-internal-api-0" Dec 13 04:02:25 crc kubenswrapper[5070]: I1213 04:02:25.010233 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-internal-api-0\" (UID: \"69a11b54-bec4-4a0e-b498-94747f0c3e37\") " pod="openstack/glance-default-internal-api-0" Dec 13 04:02:25 crc kubenswrapper[5070]: I1213 04:02:25.075835 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/48089201-da81-4de1-be8f-3a832ff67774-ceph\") pod \"glance-default-external-api-0\" (UID: \"48089201-da81-4de1-be8f-3a832ff67774\") " pod="openstack/glance-default-external-api-0" Dec 13 04:02:25 crc kubenswrapper[5070]: I1213 04:02:25.075910 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/48089201-da81-4de1-be8f-3a832ff67774-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"48089201-da81-4de1-be8f-3a832ff67774\") " pod="openstack/glance-default-external-api-0" Dec 13 04:02:25 crc kubenswrapper[5070]: I1213 04:02:25.075929 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/48089201-da81-4de1-be8f-3a832ff67774-logs\") pod \"glance-default-external-api-0\" (UID: \"48089201-da81-4de1-be8f-3a832ff67774\") " pod="openstack/glance-default-external-api-0" Dec 13 04:02:25 crc kubenswrapper[5070]: I1213 04:02:25.075978 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/48089201-da81-4de1-be8f-3a832ff67774-config-data\") pod \"glance-default-external-api-0\" (UID: 
\"48089201-da81-4de1-be8f-3a832ff67774\") " pod="openstack/glance-default-external-api-0" Dec 13 04:02:25 crc kubenswrapper[5070]: I1213 04:02:25.076026 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/48089201-da81-4de1-be8f-3a832ff67774-scripts\") pod \"glance-default-external-api-0\" (UID: \"48089201-da81-4de1-be8f-3a832ff67774\") " pod="openstack/glance-default-external-api-0" Dec 13 04:02:25 crc kubenswrapper[5070]: I1213 04:02:25.076051 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"48089201-da81-4de1-be8f-3a832ff67774\") " pod="openstack/glance-default-external-api-0" Dec 13 04:02:25 crc kubenswrapper[5070]: I1213 04:02:25.076073 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/48089201-da81-4de1-be8f-3a832ff67774-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"48089201-da81-4de1-be8f-3a832ff67774\") " pod="openstack/glance-default-external-api-0" Dec 13 04:02:25 crc kubenswrapper[5070]: I1213 04:02:25.076132 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pmrtw\" (UniqueName: \"kubernetes.io/projected/48089201-da81-4de1-be8f-3a832ff67774-kube-api-access-pmrtw\") pod \"glance-default-external-api-0\" (UID: \"48089201-da81-4de1-be8f-3a832ff67774\") " pod="openstack/glance-default-external-api-0" Dec 13 04:02:25 crc kubenswrapper[5070]: I1213 04:02:25.076172 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/48089201-da81-4de1-be8f-3a832ff67774-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"48089201-da81-4de1-be8f-3a832ff67774\") " pod="openstack/glance-default-external-api-0" Dec 13 04:02:25 crc kubenswrapper[5070]: I1213 04:02:25.076653 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/48089201-da81-4de1-be8f-3a832ff67774-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"48089201-da81-4de1-be8f-3a832ff67774\") " pod="openstack/glance-default-external-api-0" Dec 13 04:02:25 crc kubenswrapper[5070]: I1213 04:02:25.076770 5070 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"48089201-da81-4de1-be8f-3a832ff67774\") device mount path \"/mnt/openstack/pv09\"" pod="openstack/glance-default-external-api-0" Dec 13 04:02:25 crc kubenswrapper[5070]: I1213 04:02:25.079963 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/48089201-da81-4de1-be8f-3a832ff67774-scripts\") pod \"glance-default-external-api-0\" (UID: \"48089201-da81-4de1-be8f-3a832ff67774\") " pod="openstack/glance-default-external-api-0" Dec 13 04:02:25 crc kubenswrapper[5070]: I1213 04:02:25.080009 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/48089201-da81-4de1-be8f-3a832ff67774-ceph\") pod \"glance-default-external-api-0\" (UID: \"48089201-da81-4de1-be8f-3a832ff67774\") " pod="openstack/glance-default-external-api-0" Dec 13 04:02:25 crc 
kubenswrapper[5070]: I1213 04:02:25.080324 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/48089201-da81-4de1-be8f-3a832ff67774-logs\") pod \"glance-default-external-api-0\" (UID: \"48089201-da81-4de1-be8f-3a832ff67774\") " pod="openstack/glance-default-external-api-0" Dec 13 04:02:25 crc kubenswrapper[5070]: I1213 04:02:25.086015 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/48089201-da81-4de1-be8f-3a832ff67774-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"48089201-da81-4de1-be8f-3a832ff67774\") " pod="openstack/glance-default-external-api-0" Dec 13 04:02:25 crc kubenswrapper[5070]: I1213 04:02:25.087306 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/48089201-da81-4de1-be8f-3a832ff67774-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"48089201-da81-4de1-be8f-3a832ff67774\") " pod="openstack/glance-default-external-api-0" Dec 13 04:02:25 crc kubenswrapper[5070]: I1213 04:02:25.092833 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/48089201-da81-4de1-be8f-3a832ff67774-config-data\") pod \"glance-default-external-api-0\" (UID: \"48089201-da81-4de1-be8f-3a832ff67774\") " pod="openstack/glance-default-external-api-0" Dec 13 04:02:25 crc kubenswrapper[5070]: I1213 04:02:25.097536 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pmrtw\" (UniqueName: \"kubernetes.io/projected/48089201-da81-4de1-be8f-3a832ff67774-kube-api-access-pmrtw\") pod \"glance-default-external-api-0\" (UID: \"48089201-da81-4de1-be8f-3a832ff67774\") " pod="openstack/glance-default-external-api-0" Dec 13 04:02:25 crc kubenswrapper[5070]: I1213 04:02:25.109988 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 13 04:02:25 crc kubenswrapper[5070]: I1213 04:02:25.118378 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"48089201-da81-4de1-be8f-3a832ff67774\") " pod="openstack/glance-default-external-api-0" Dec 13 04:02:25 crc kubenswrapper[5070]: I1213 04:02:25.126509 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 13 04:02:25 crc kubenswrapper[5070]: I1213 04:02:25.842177 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 13 04:02:25 crc kubenswrapper[5070]: I1213 04:02:25.945005 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 13 04:02:26 crc kubenswrapper[5070]: I1213 04:02:26.212089 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d" path="/var/lib/kubelet/pods/3fbb90b2-2bcf-4b11-a65a-8ac1bd09462d/volumes" Dec 13 04:02:26 crc kubenswrapper[5070]: I1213 04:02:26.213861 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="89ec3336-0c2e-4377-9cb8-fd3ded3af7d0" path="/var/lib/kubelet/pods/89ec3336-0c2e-4377-9cb8-fd3ded3af7d0/volumes" Dec 13 04:02:26 crc kubenswrapper[5070]: I1213 04:02:26.656462 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"69a11b54-bec4-4a0e-b498-94747f0c3e37","Type":"ContainerStarted","Data":"4f50c9b2e8daf2cb92f2e8530b7cc79e3687447ba7ac8ad11320413ce76ee7c4"} Dec 13 04:02:26 crc kubenswrapper[5070]: I1213 04:02:26.658671 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"48089201-da81-4de1-be8f-3a832ff67774","Type":"ContainerStarted","Data":"66b58fe9bbdb79a6e2320b6f667096ba59facabb3269f707e00939c9a1554931"} Dec 13 04:02:27 crc kubenswrapper[5070]: I1213 04:02:27.680049 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"69a11b54-bec4-4a0e-b498-94747f0c3e37","Type":"ContainerStarted","Data":"2d69ce8bdc6a44819ff5c5b0800d116ad13d9423c20da76cdb8e25c86321164d"} Dec 13 04:02:27 crc kubenswrapper[5070]: I1213 04:02:27.681622 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"48089201-da81-4de1-be8f-3a832ff67774","Type":"ContainerStarted","Data":"55cd3418369fb4428d856ba4a97ad52e693ede655f634f18b38818625a515f54"} Dec 13 04:02:29 crc kubenswrapper[5070]: I1213 04:02:29.092013 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-volume-volume1-0" Dec 13 04:02:29 crc kubenswrapper[5070]: I1213 04:02:29.123691 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-backup-0" Dec 13 04:02:29 crc kubenswrapper[5070]: I1213 04:02:29.177808 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-19fc-account-create-x6vnv"] Dec 13 04:02:29 crc kubenswrapper[5070]: I1213 04:02:29.189586 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-19fc-account-create-x6vnv" Dec 13 04:02:29 crc kubenswrapper[5070]: I1213 04:02:29.194703 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-db-secret" Dec 13 04:02:29 crc kubenswrapper[5070]: I1213 04:02:29.243371 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-19fc-account-create-x6vnv"] Dec 13 04:02:29 crc kubenswrapper[5070]: I1213 04:02:29.266997 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8sdw2\" (UniqueName: \"kubernetes.io/projected/0910bd27-a460-49c0-880a-47f2e595337e-kube-api-access-8sdw2\") pod \"manila-19fc-account-create-x6vnv\" (UID: \"0910bd27-a460-49c0-880a-47f2e595337e\") " pod="openstack/manila-19fc-account-create-x6vnv" Dec 13 04:02:29 crc kubenswrapper[5070]: I1213 04:02:29.369648 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8sdw2\" (UniqueName: \"kubernetes.io/projected/0910bd27-a460-49c0-880a-47f2e595337e-kube-api-access-8sdw2\") pod \"manila-19fc-account-create-x6vnv\" (UID: \"0910bd27-a460-49c0-880a-47f2e595337e\") " pod="openstack/manila-19fc-account-create-x6vnv" Dec 13 04:02:29 crc kubenswrapper[5070]: I1213 04:02:29.408302 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8sdw2\" (UniqueName: \"kubernetes.io/projected/0910bd27-a460-49c0-880a-47f2e595337e-kube-api-access-8sdw2\") pod \"manila-19fc-account-create-x6vnv\" (UID: \"0910bd27-a460-49c0-880a-47f2e595337e\") " pod="openstack/manila-19fc-account-create-x6vnv" Dec 13 04:02:29 crc kubenswrapper[5070]: I1213 04:02:29.521970 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-19fc-account-create-x6vnv" Dec 13 04:02:32 crc kubenswrapper[5070]: I1213 04:02:32.109303 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-19fc-account-create-x6vnv"] Dec 13 04:02:32 crc kubenswrapper[5070]: W1213 04:02:32.113382 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0910bd27_a460_49c0_880a_47f2e595337e.slice/crio-50436db6abdf0494b9ef05d5843869dfaeca0d1ca8c733188fefb98e47456e19 WatchSource:0}: Error finding container 50436db6abdf0494b9ef05d5843869dfaeca0d1ca8c733188fefb98e47456e19: Status 404 returned error can't find the container with id 50436db6abdf0494b9ef05d5843869dfaeca0d1ca8c733188fefb98e47456e19 Dec 13 04:02:32 crc kubenswrapper[5070]: I1213 04:02:32.743212 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-784865f99f-5r8dj" event={"ID":"904c7826-4376-4e6d-a520-a12e30a3495d","Type":"ContainerStarted","Data":"2c762c0a8d94ff654466d835ebe25ead94a96ff34f5a32b4f4aff15c04577e92"} Dec 13 04:02:32 crc kubenswrapper[5070]: I1213 04:02:32.745060 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-19fc-account-create-x6vnv" event={"ID":"0910bd27-a460-49c0-880a-47f2e595337e","Type":"ContainerStarted","Data":"50436db6abdf0494b9ef05d5843869dfaeca0d1ca8c733188fefb98e47456e19"} Dec 13 04:02:33 crc kubenswrapper[5070]: I1213 04:02:33.756422 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-64b5b8c565-xr2cz" event={"ID":"b6951934-d07e-4e5a-930c-779502792de9","Type":"ContainerStarted","Data":"dbf836f7f9c91df84a121a6c3c2296591015d0906c17afc70f59dfcb146ac8f7"} Dec 13 04:02:33 crc kubenswrapper[5070]: I1213 04:02:33.756993 5070 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-64b5b8c565-xr2cz" event={"ID":"b6951934-d07e-4e5a-930c-779502792de9","Type":"ContainerStarted","Data":"e518416bd5d27587ddd8e5b118874c57f96850abc00f015a7407c6cc22990f6c"} Dec 13 04:02:33 crc kubenswrapper[5070]: I1213 04:02:33.756695 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-64b5b8c565-xr2cz" podUID="b6951934-d07e-4e5a-930c-779502792de9" containerName="horizon" containerID="cri-o://dbf836f7f9c91df84a121a6c3c2296591015d0906c17afc70f59dfcb146ac8f7" gracePeriod=30 Dec 13 04:02:33 crc kubenswrapper[5070]: I1213 04:02:33.756645 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-64b5b8c565-xr2cz" podUID="b6951934-d07e-4e5a-930c-779502792de9" containerName="horizon-log" containerID="cri-o://e518416bd5d27587ddd8e5b118874c57f96850abc00f015a7407c6cc22990f6c" gracePeriod=30 Dec 13 04:02:33 crc kubenswrapper[5070]: I1213 04:02:33.761707 5070 generic.go:334] "Generic (PLEG): container finished" podID="0910bd27-a460-49c0-880a-47f2e595337e" containerID="5a277d25ba080dfaec47e89b6a7a40d80918488b53f9b2287ae863096fc8f07d" exitCode=0 Dec 13 04:02:33 crc kubenswrapper[5070]: I1213 04:02:33.761805 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-19fc-account-create-x6vnv" event={"ID":"0910bd27-a460-49c0-880a-47f2e595337e","Type":"ContainerDied","Data":"5a277d25ba080dfaec47e89b6a7a40d80918488b53f9b2287ae863096fc8f07d"} Dec 13 04:02:33 crc kubenswrapper[5070]: I1213 04:02:33.764530 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-8655d596d8-lz82d" event={"ID":"c1f8d0b8-5e4f-4237-a0f3-fa38d0c2bfde","Type":"ContainerStarted","Data":"6b59e141cc23b6b17b97cfa5ce9f971d64aea89c823a53b96ffc8032cd04ebea"} Dec 13 04:02:33 crc kubenswrapper[5070]: I1213 04:02:33.764575 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-8655d596d8-lz82d" event={"ID":"c1f8d0b8-5e4f-4237-a0f3-fa38d0c2bfde","Type":"ContainerStarted","Data":"eb75d343f3112d9ad33996797fe96db5560a98468fa3d4ac54c631125680a573"} Dec 13 04:02:33 crc kubenswrapper[5070]: I1213 04:02:33.770761 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-677fbcc968-jlxpr" event={"ID":"0f20c4db-2943-474f-970d-02d52f185c6e","Type":"ContainerStarted","Data":"3b5170299cf86540bf9ffcd68a1aefacc52df624faa0a24827107039f9b865db"} Dec 13 04:02:33 crc kubenswrapper[5070]: I1213 04:02:33.770809 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-677fbcc968-jlxpr" event={"ID":"0f20c4db-2943-474f-970d-02d52f185c6e","Type":"ContainerStarted","Data":"d5fcca5b12c472f0d65862cba81af0ae790a9449ba5fe95a7e1a1ac1aa1750f2"} Dec 13 04:02:33 crc kubenswrapper[5070]: I1213 04:02:33.773082 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"69a11b54-bec4-4a0e-b498-94747f0c3e37","Type":"ContainerStarted","Data":"a2486e35d714c36db400f83e1d8ef2383a47770f8c247c86c1c7fc6cab1fd287"} Dec 13 04:02:33 crc kubenswrapper[5070]: I1213 04:02:33.774880 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"48089201-da81-4de1-be8f-3a832ff67774","Type":"ContainerStarted","Data":"fbc345cd43aac5be6276a1de7333b4ec716af35880922aba1b33e940f026775a"} Dec 13 04:02:33 crc kubenswrapper[5070]: I1213 04:02:33.776599 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/horizon-784865f99f-5r8dj" event={"ID":"904c7826-4376-4e6d-a520-a12e30a3495d","Type":"ContainerStarted","Data":"04386b664fd4f6609da560517be9f1965e9ef293cdc93642c402a0306506a65e"} Dec 13 04:02:33 crc kubenswrapper[5070]: I1213 04:02:33.776724 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-784865f99f-5r8dj" podUID="904c7826-4376-4e6d-a520-a12e30a3495d" containerName="horizon-log" containerID="cri-o://2c762c0a8d94ff654466d835ebe25ead94a96ff34f5a32b4f4aff15c04577e92" gracePeriod=30 Dec 13 04:02:33 crc kubenswrapper[5070]: I1213 04:02:33.776810 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-784865f99f-5r8dj" podUID="904c7826-4376-4e6d-a520-a12e30a3495d" containerName="horizon" containerID="cri-o://04386b664fd4f6609da560517be9f1965e9ef293cdc93642c402a0306506a65e" gracePeriod=30 Dec 13 04:02:33 crc kubenswrapper[5070]: I1213 04:02:33.800496 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-64b5b8c565-xr2cz" podStartSLOduration=3.192587755 podStartE2EDuration="14.800471095s" podCreationTimestamp="2025-12-13 04:02:19 +0000 UTC" firstStartedPulling="2025-12-13 04:02:20.142190965 +0000 UTC m=+3032.378034511" lastFinishedPulling="2025-12-13 04:02:31.750074305 +0000 UTC m=+3043.985917851" observedRunningTime="2025-12-13 04:02:33.788048296 +0000 UTC m=+3046.023891842" watchObservedRunningTime="2025-12-13 04:02:33.800471095 +0000 UTC m=+3046.036314661" Dec 13 04:02:33 crc kubenswrapper[5070]: I1213 04:02:33.825977 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=9.825957871 podStartE2EDuration="9.825957871s" podCreationTimestamp="2025-12-13 04:02:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 04:02:33.821995232 +0000 UTC m=+3046.057838788" watchObservedRunningTime="2025-12-13 04:02:33.825957871 +0000 UTC m=+3046.061801417" Dec 13 04:02:33 crc kubenswrapper[5070]: I1213 04:02:33.847057 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-784865f99f-5r8dj" podStartSLOduration=3.653751782 podStartE2EDuration="14.847041416s" podCreationTimestamp="2025-12-13 04:02:19 +0000 UTC" firstStartedPulling="2025-12-13 04:02:20.492773843 +0000 UTC m=+3032.728617389" lastFinishedPulling="2025-12-13 04:02:31.686063477 +0000 UTC m=+3043.921907023" observedRunningTime="2025-12-13 04:02:33.842944044 +0000 UTC m=+3046.078787590" watchObservedRunningTime="2025-12-13 04:02:33.847041416 +0000 UTC m=+3046.082884962" Dec 13 04:02:33 crc kubenswrapper[5070]: I1213 04:02:33.890748 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-677fbcc968-jlxpr" podStartSLOduration=3.504765454 podStartE2EDuration="11.890728018s" podCreationTimestamp="2025-12-13 04:02:22 +0000 UTC" firstStartedPulling="2025-12-13 04:02:23.368993293 +0000 UTC m=+3035.604836849" lastFinishedPulling="2025-12-13 04:02:31.754955867 +0000 UTC m=+3043.990799413" observedRunningTime="2025-12-13 04:02:33.8666194 +0000 UTC m=+3046.102462946" watchObservedRunningTime="2025-12-13 04:02:33.890728018 +0000 UTC m=+3046.126571564" Dec 13 04:02:33 crc kubenswrapper[5070]: I1213 04:02:33.893891 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-8655d596d8-lz82d" podStartSLOduration=3.645315279 
podStartE2EDuration="11.893876684s" podCreationTimestamp="2025-12-13 04:02:22 +0000 UTC" firstStartedPulling="2025-12-13 04:02:23.482875311 +0000 UTC m=+3035.718718857" lastFinishedPulling="2025-12-13 04:02:31.731436716 +0000 UTC m=+3043.967280262" observedRunningTime="2025-12-13 04:02:33.883275525 +0000 UTC m=+3046.119119071" watchObservedRunningTime="2025-12-13 04:02:33.893876684 +0000 UTC m=+3046.129720230" Dec 13 04:02:33 crc kubenswrapper[5070]: I1213 04:02:33.934039 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=9.934012409 podStartE2EDuration="9.934012409s" podCreationTimestamp="2025-12-13 04:02:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 04:02:33.923182774 +0000 UTC m=+3046.159026320" watchObservedRunningTime="2025-12-13 04:02:33.934012409 +0000 UTC m=+3046.169855955" Dec 13 04:02:35 crc kubenswrapper[5070]: I1213 04:02:35.110239 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 13 04:02:35 crc kubenswrapper[5070]: I1213 04:02:35.110800 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 13 04:02:35 crc kubenswrapper[5070]: I1213 04:02:35.127970 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 13 04:02:35 crc kubenswrapper[5070]: I1213 04:02:35.128021 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 13 04:02:35 crc kubenswrapper[5070]: I1213 04:02:35.144023 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 13 04:02:35 crc kubenswrapper[5070]: I1213 04:02:35.152964 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 13 04:02:35 crc kubenswrapper[5070]: I1213 04:02:35.159186 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 13 04:02:35 crc kubenswrapper[5070]: I1213 04:02:35.182978 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-19fc-account-create-x6vnv" Dec 13 04:02:35 crc kubenswrapper[5070]: I1213 04:02:35.219050 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 13 04:02:35 crc kubenswrapper[5070]: I1213 04:02:35.303268 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8sdw2\" (UniqueName: \"kubernetes.io/projected/0910bd27-a460-49c0-880a-47f2e595337e-kube-api-access-8sdw2\") pod \"0910bd27-a460-49c0-880a-47f2e595337e\" (UID: \"0910bd27-a460-49c0-880a-47f2e595337e\") " Dec 13 04:02:35 crc kubenswrapper[5070]: I1213 04:02:35.310894 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0910bd27-a460-49c0-880a-47f2e595337e-kube-api-access-8sdw2" (OuterVolumeSpecName: "kube-api-access-8sdw2") pod "0910bd27-a460-49c0-880a-47f2e595337e" (UID: "0910bd27-a460-49c0-880a-47f2e595337e"). InnerVolumeSpecName "kube-api-access-8sdw2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 04:02:35 crc kubenswrapper[5070]: I1213 04:02:35.405944 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8sdw2\" (UniqueName: \"kubernetes.io/projected/0910bd27-a460-49c0-880a-47f2e595337e-kube-api-access-8sdw2\") on node \"crc\" DevicePath \"\"" Dec 13 04:02:35 crc kubenswrapper[5070]: I1213 04:02:35.803556 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-19fc-account-create-x6vnv" Dec 13 04:02:35 crc kubenswrapper[5070]: I1213 04:02:35.805428 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-19fc-account-create-x6vnv" event={"ID":"0910bd27-a460-49c0-880a-47f2e595337e","Type":"ContainerDied","Data":"50436db6abdf0494b9ef05d5843869dfaeca0d1ca8c733188fefb98e47456e19"} Dec 13 04:02:35 crc kubenswrapper[5070]: I1213 04:02:35.805494 5070 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="50436db6abdf0494b9ef05d5843869dfaeca0d1ca8c733188fefb98e47456e19" Dec 13 04:02:35 crc kubenswrapper[5070]: I1213 04:02:35.805522 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 13 04:02:35 crc kubenswrapper[5070]: I1213 04:02:35.806738 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 13 04:02:35 crc kubenswrapper[5070]: I1213 04:02:35.806769 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 13 04:02:35 crc kubenswrapper[5070]: I1213 04:02:35.806781 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 13 04:02:37 crc kubenswrapper[5070]: I1213 04:02:37.821656 5070 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 13 04:02:37 crc kubenswrapper[5070]: I1213 04:02:37.821656 5070 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 13 04:02:39 crc kubenswrapper[5070]: I1213 04:02:39.323678 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 13 04:02:39 crc kubenswrapper[5070]: I1213 04:02:39.325485 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 13 04:02:39 crc kubenswrapper[5070]: I1213 04:02:39.451781 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-64b5b8c565-xr2cz" Dec 13 04:02:39 crc kubenswrapper[5070]: I1213 04:02:39.760591 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-784865f99f-5r8dj" Dec 13 04:02:39 crc kubenswrapper[5070]: I1213 04:02:39.998854 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-db-sync-gl79z"] Dec 13 04:02:39 crc kubenswrapper[5070]: E1213 04:02:39.999268 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0910bd27-a460-49c0-880a-47f2e595337e" containerName="mariadb-account-create" Dec 13 04:02:39 crc kubenswrapper[5070]: I1213 04:02:39.999286 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="0910bd27-a460-49c0-880a-47f2e595337e" containerName="mariadb-account-create" Dec 13 04:02:39 crc kubenswrapper[5070]: I1213 04:02:39.999488 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="0910bd27-a460-49c0-880a-47f2e595337e" containerName="mariadb-account-create" Dec 13 
04:02:40 crc kubenswrapper[5070]: I1213 04:02:40.000129 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-sync-gl79z" Dec 13 04:02:40 crc kubenswrapper[5070]: I1213 04:02:40.001855 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-manila-dockercfg-k2gsw" Dec 13 04:02:40 crc kubenswrapper[5070]: I1213 04:02:40.003028 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-config-data" Dec 13 04:02:40 crc kubenswrapper[5070]: I1213 04:02:40.006242 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6f534dac-0129-4309-b670-2dcfd808b721-config-data\") pod \"manila-db-sync-gl79z\" (UID: \"6f534dac-0129-4309-b670-2dcfd808b721\") " pod="openstack/manila-db-sync-gl79z" Dec 13 04:02:40 crc kubenswrapper[5070]: I1213 04:02:40.006527 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6f534dac-0129-4309-b670-2dcfd808b721-combined-ca-bundle\") pod \"manila-db-sync-gl79z\" (UID: \"6f534dac-0129-4309-b670-2dcfd808b721\") " pod="openstack/manila-db-sync-gl79z" Dec 13 04:02:40 crc kubenswrapper[5070]: I1213 04:02:40.006649 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ktxs5\" (UniqueName: \"kubernetes.io/projected/6f534dac-0129-4309-b670-2dcfd808b721-kube-api-access-ktxs5\") pod \"manila-db-sync-gl79z\" (UID: \"6f534dac-0129-4309-b670-2dcfd808b721\") " pod="openstack/manila-db-sync-gl79z" Dec 13 04:02:40 crc kubenswrapper[5070]: I1213 04:02:40.006713 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/6f534dac-0129-4309-b670-2dcfd808b721-job-config-data\") pod \"manila-db-sync-gl79z\" (UID: \"6f534dac-0129-4309-b670-2dcfd808b721\") " pod="openstack/manila-db-sync-gl79z" Dec 13 04:02:40 crc kubenswrapper[5070]: I1213 04:02:40.017898 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-db-sync-gl79z"] Dec 13 04:02:40 crc kubenswrapper[5070]: I1213 04:02:40.108198 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/6f534dac-0129-4309-b670-2dcfd808b721-job-config-data\") pod \"manila-db-sync-gl79z\" (UID: \"6f534dac-0129-4309-b670-2dcfd808b721\") " pod="openstack/manila-db-sync-gl79z" Dec 13 04:02:40 crc kubenswrapper[5070]: I1213 04:02:40.108353 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6f534dac-0129-4309-b670-2dcfd808b721-config-data\") pod \"manila-db-sync-gl79z\" (UID: \"6f534dac-0129-4309-b670-2dcfd808b721\") " pod="openstack/manila-db-sync-gl79z" Dec 13 04:02:40 crc kubenswrapper[5070]: I1213 04:02:40.108406 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6f534dac-0129-4309-b670-2dcfd808b721-combined-ca-bundle\") pod \"manila-db-sync-gl79z\" (UID: \"6f534dac-0129-4309-b670-2dcfd808b721\") " pod="openstack/manila-db-sync-gl79z" Dec 13 04:02:40 crc kubenswrapper[5070]: I1213 04:02:40.108485 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ktxs5\" (UniqueName: 
\"kubernetes.io/projected/6f534dac-0129-4309-b670-2dcfd808b721-kube-api-access-ktxs5\") pod \"manila-db-sync-gl79z\" (UID: \"6f534dac-0129-4309-b670-2dcfd808b721\") " pod="openstack/manila-db-sync-gl79z" Dec 13 04:02:40 crc kubenswrapper[5070]: I1213 04:02:40.114938 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6f534dac-0129-4309-b670-2dcfd808b721-config-data\") pod \"manila-db-sync-gl79z\" (UID: \"6f534dac-0129-4309-b670-2dcfd808b721\") " pod="openstack/manila-db-sync-gl79z" Dec 13 04:02:40 crc kubenswrapper[5070]: I1213 04:02:40.115638 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/6f534dac-0129-4309-b670-2dcfd808b721-job-config-data\") pod \"manila-db-sync-gl79z\" (UID: \"6f534dac-0129-4309-b670-2dcfd808b721\") " pod="openstack/manila-db-sync-gl79z" Dec 13 04:02:40 crc kubenswrapper[5070]: I1213 04:02:40.133047 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ktxs5\" (UniqueName: \"kubernetes.io/projected/6f534dac-0129-4309-b670-2dcfd808b721-kube-api-access-ktxs5\") pod \"manila-db-sync-gl79z\" (UID: \"6f534dac-0129-4309-b670-2dcfd808b721\") " pod="openstack/manila-db-sync-gl79z" Dec 13 04:02:40 crc kubenswrapper[5070]: I1213 04:02:40.133185 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6f534dac-0129-4309-b670-2dcfd808b721-combined-ca-bundle\") pod \"manila-db-sync-gl79z\" (UID: \"6f534dac-0129-4309-b670-2dcfd808b721\") " pod="openstack/manila-db-sync-gl79z" Dec 13 04:02:40 crc kubenswrapper[5070]: I1213 04:02:40.140879 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 13 04:02:40 crc kubenswrapper[5070]: I1213 04:02:40.190674 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 13 04:02:40 crc kubenswrapper[5070]: I1213 04:02:40.341588 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-db-sync-gl79z" Dec 13 04:02:41 crc kubenswrapper[5070]: I1213 04:02:41.114623 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-db-sync-gl79z"] Dec 13 04:02:41 crc kubenswrapper[5070]: I1213 04:02:41.862682 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-sync-gl79z" event={"ID":"6f534dac-0129-4309-b670-2dcfd808b721","Type":"ContainerStarted","Data":"19af6ad1cef42510d53752285c2bbc1ac340fe5cdc55b1b505c7f3f1b38bccde"} Dec 13 04:02:42 crc kubenswrapper[5070]: I1213 04:02:42.792796 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-677fbcc968-jlxpr" Dec 13 04:02:42 crc kubenswrapper[5070]: I1213 04:02:42.793150 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-677fbcc968-jlxpr" Dec 13 04:02:42 crc kubenswrapper[5070]: I1213 04:02:42.919961 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-8655d596d8-lz82d" Dec 13 04:02:42 crc kubenswrapper[5070]: I1213 04:02:42.920007 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-8655d596d8-lz82d" Dec 13 04:02:46 crc kubenswrapper[5070]: I1213 04:02:46.923018 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-sync-gl79z" event={"ID":"6f534dac-0129-4309-b670-2dcfd808b721","Type":"ContainerStarted","Data":"eb71c9717b90de3c4ee25eb9b3359b6ac5ea57f331120c04c155b5ce592bdfcd"} Dec 13 04:02:46 crc kubenswrapper[5070]: I1213 04:02:46.952185 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-db-sync-gl79z" podStartSLOduration=3.2965959160000002 podStartE2EDuration="7.952166249s" podCreationTimestamp="2025-12-13 04:02:39 +0000 UTC" firstStartedPulling="2025-12-13 04:02:41.122552343 +0000 UTC m=+3053.358395889" lastFinishedPulling="2025-12-13 04:02:45.778122676 +0000 UTC m=+3058.013966222" observedRunningTime="2025-12-13 04:02:46.947736008 +0000 UTC m=+3059.183579564" watchObservedRunningTime="2025-12-13 04:02:46.952166249 +0000 UTC m=+3059.188009815" Dec 13 04:02:52 crc kubenswrapper[5070]: I1213 04:02:52.795482 5070 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-677fbcc968-jlxpr" podUID="0f20c4db-2943-474f-970d-02d52f185c6e" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.238:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.238:8443: connect: connection refused" Dec 13 04:02:52 crc kubenswrapper[5070]: I1213 04:02:52.917523 5070 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-8655d596d8-lz82d" podUID="c1f8d0b8-5e4f-4237-a0f3-fa38d0c2bfde" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.239:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.239:8443: connect: connection refused" Dec 13 04:02:58 crc kubenswrapper[5070]: I1213 04:02:58.023689 5070 generic.go:334] "Generic (PLEG): container finished" podID="6f534dac-0129-4309-b670-2dcfd808b721" containerID="eb71c9717b90de3c4ee25eb9b3359b6ac5ea57f331120c04c155b5ce592bdfcd" exitCode=0 Dec 13 04:02:58 crc kubenswrapper[5070]: I1213 04:02:58.023803 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-sync-gl79z" event={"ID":"6f534dac-0129-4309-b670-2dcfd808b721","Type":"ContainerDied","Data":"eb71c9717b90de3c4ee25eb9b3359b6ac5ea57f331120c04c155b5ce592bdfcd"} Dec 13 04:02:59 crc kubenswrapper[5070]: I1213 
04:02:59.431945 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-sync-gl79z" Dec 13 04:02:59 crc kubenswrapper[5070]: I1213 04:02:59.554228 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6f534dac-0129-4309-b670-2dcfd808b721-combined-ca-bundle\") pod \"6f534dac-0129-4309-b670-2dcfd808b721\" (UID: \"6f534dac-0129-4309-b670-2dcfd808b721\") " Dec 13 04:02:59 crc kubenswrapper[5070]: I1213 04:02:59.554398 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ktxs5\" (UniqueName: \"kubernetes.io/projected/6f534dac-0129-4309-b670-2dcfd808b721-kube-api-access-ktxs5\") pod \"6f534dac-0129-4309-b670-2dcfd808b721\" (UID: \"6f534dac-0129-4309-b670-2dcfd808b721\") " Dec 13 04:02:59 crc kubenswrapper[5070]: I1213 04:02:59.554528 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6f534dac-0129-4309-b670-2dcfd808b721-config-data\") pod \"6f534dac-0129-4309-b670-2dcfd808b721\" (UID: \"6f534dac-0129-4309-b670-2dcfd808b721\") " Dec 13 04:02:59 crc kubenswrapper[5070]: I1213 04:02:59.554666 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/6f534dac-0129-4309-b670-2dcfd808b721-job-config-data\") pod \"6f534dac-0129-4309-b670-2dcfd808b721\" (UID: \"6f534dac-0129-4309-b670-2dcfd808b721\") " Dec 13 04:02:59 crc kubenswrapper[5070]: I1213 04:02:59.559904 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6f534dac-0129-4309-b670-2dcfd808b721-kube-api-access-ktxs5" (OuterVolumeSpecName: "kube-api-access-ktxs5") pod "6f534dac-0129-4309-b670-2dcfd808b721" (UID: "6f534dac-0129-4309-b670-2dcfd808b721"). InnerVolumeSpecName "kube-api-access-ktxs5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 04:02:59 crc kubenswrapper[5070]: I1213 04:02:59.560891 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6f534dac-0129-4309-b670-2dcfd808b721-job-config-data" (OuterVolumeSpecName: "job-config-data") pod "6f534dac-0129-4309-b670-2dcfd808b721" (UID: "6f534dac-0129-4309-b670-2dcfd808b721"). InnerVolumeSpecName "job-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 04:02:59 crc kubenswrapper[5070]: I1213 04:02:59.570422 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6f534dac-0129-4309-b670-2dcfd808b721-config-data" (OuterVolumeSpecName: "config-data") pod "6f534dac-0129-4309-b670-2dcfd808b721" (UID: "6f534dac-0129-4309-b670-2dcfd808b721"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 04:02:59 crc kubenswrapper[5070]: I1213 04:02:59.599728 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6f534dac-0129-4309-b670-2dcfd808b721-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6f534dac-0129-4309-b670-2dcfd808b721" (UID: "6f534dac-0129-4309-b670-2dcfd808b721"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 04:02:59 crc kubenswrapper[5070]: I1213 04:02:59.657213 5070 reconciler_common.go:293] "Volume detached for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/6f534dac-0129-4309-b670-2dcfd808b721-job-config-data\") on node \"crc\" DevicePath \"\"" Dec 13 04:02:59 crc kubenswrapper[5070]: I1213 04:02:59.657246 5070 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6f534dac-0129-4309-b670-2dcfd808b721-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 04:02:59 crc kubenswrapper[5070]: I1213 04:02:59.657256 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ktxs5\" (UniqueName: \"kubernetes.io/projected/6f534dac-0129-4309-b670-2dcfd808b721-kube-api-access-ktxs5\") on node \"crc\" DevicePath \"\"" Dec 13 04:02:59 crc kubenswrapper[5070]: I1213 04:02:59.657268 5070 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6f534dac-0129-4309-b670-2dcfd808b721-config-data\") on node \"crc\" DevicePath \"\"" Dec 13 04:03:00 crc kubenswrapper[5070]: I1213 04:03:00.047087 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-sync-gl79z" event={"ID":"6f534dac-0129-4309-b670-2dcfd808b721","Type":"ContainerDied","Data":"19af6ad1cef42510d53752285c2bbc1ac340fe5cdc55b1b505c7f3f1b38bccde"} Dec 13 04:03:00 crc kubenswrapper[5070]: I1213 04:03:00.047133 5070 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="19af6ad1cef42510d53752285c2bbc1ac340fe5cdc55b1b505c7f3f1b38bccde" Dec 13 04:03:00 crc kubenswrapper[5070]: I1213 04:03:00.047185 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-sync-gl79z" Dec 13 04:03:00 crc kubenswrapper[5070]: I1213 04:03:00.347039 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-scheduler-0"] Dec 13 04:03:00 crc kubenswrapper[5070]: E1213 04:03:00.347529 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6f534dac-0129-4309-b670-2dcfd808b721" containerName="manila-db-sync" Dec 13 04:03:00 crc kubenswrapper[5070]: I1213 04:03:00.347550 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="6f534dac-0129-4309-b670-2dcfd808b721" containerName="manila-db-sync" Dec 13 04:03:00 crc kubenswrapper[5070]: I1213 04:03:00.347774 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="6f534dac-0129-4309-b670-2dcfd808b721" containerName="manila-db-sync" Dec 13 04:03:00 crc kubenswrapper[5070]: I1213 04:03:00.348848 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-scheduler-0" Dec 13 04:03:00 crc kubenswrapper[5070]: I1213 04:03:00.354877 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-scheduler-config-data" Dec 13 04:03:00 crc kubenswrapper[5070]: I1213 04:03:00.355000 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-scripts" Dec 13 04:03:00 crc kubenswrapper[5070]: I1213 04:03:00.355100 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-manila-dockercfg-k2gsw" Dec 13 04:03:00 crc kubenswrapper[5070]: I1213 04:03:00.355155 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-config-data" Dec 13 04:03:00 crc kubenswrapper[5070]: I1213 04:03:00.360373 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-scheduler-0"] Dec 13 04:03:00 crc kubenswrapper[5070]: I1213 04:03:00.372351 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cdhvk\" (UniqueName: \"kubernetes.io/projected/626e6c11-d496-4667-a00d-b84fbf1fa0de-kube-api-access-cdhvk\") pod \"manila-scheduler-0\" (UID: \"626e6c11-d496-4667-a00d-b84fbf1fa0de\") " pod="openstack/manila-scheduler-0" Dec 13 04:03:00 crc kubenswrapper[5070]: I1213 04:03:00.372428 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/626e6c11-d496-4667-a00d-b84fbf1fa0de-scripts\") pod \"manila-scheduler-0\" (UID: \"626e6c11-d496-4667-a00d-b84fbf1fa0de\") " pod="openstack/manila-scheduler-0" Dec 13 04:03:00 crc kubenswrapper[5070]: I1213 04:03:00.372536 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/626e6c11-d496-4667-a00d-b84fbf1fa0de-combined-ca-bundle\") pod \"manila-scheduler-0\" (UID: \"626e6c11-d496-4667-a00d-b84fbf1fa0de\") " pod="openstack/manila-scheduler-0" Dec 13 04:03:00 crc kubenswrapper[5070]: I1213 04:03:00.372618 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/626e6c11-d496-4667-a00d-b84fbf1fa0de-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"626e6c11-d496-4667-a00d-b84fbf1fa0de\") " pod="openstack/manila-scheduler-0" Dec 13 04:03:00 crc kubenswrapper[5070]: I1213 04:03:00.372661 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/626e6c11-d496-4667-a00d-b84fbf1fa0de-config-data\") pod \"manila-scheduler-0\" (UID: \"626e6c11-d496-4667-a00d-b84fbf1fa0de\") " pod="openstack/manila-scheduler-0" Dec 13 04:03:00 crc kubenswrapper[5070]: I1213 04:03:00.372685 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/626e6c11-d496-4667-a00d-b84fbf1fa0de-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"626e6c11-d496-4667-a00d-b84fbf1fa0de\") " pod="openstack/manila-scheduler-0" Dec 13 04:03:00 crc kubenswrapper[5070]: I1213 04:03:00.456748 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-share-share1-0"] Dec 13 04:03:00 crc kubenswrapper[5070]: I1213 04:03:00.458302 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-share-share1-0" Dec 13 04:03:00 crc kubenswrapper[5070]: I1213 04:03:00.463645 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-share-share1-config-data" Dec 13 04:03:00 crc kubenswrapper[5070]: I1213 04:03:00.475487 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/709384c1-56bc-451b-939e-910783412f47-config-data-custom\") pod \"manila-share-share1-0\" (UID: \"709384c1-56bc-451b-939e-910783412f47\") " pod="openstack/manila-share-share1-0" Dec 13 04:03:00 crc kubenswrapper[5070]: I1213 04:03:00.475565 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/709384c1-56bc-451b-939e-910783412f47-etc-machine-id\") pod \"manila-share-share1-0\" (UID: \"709384c1-56bc-451b-939e-910783412f47\") " pod="openstack/manila-share-share1-0" Dec 13 04:03:00 crc kubenswrapper[5070]: I1213 04:03:00.475634 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/626e6c11-d496-4667-a00d-b84fbf1fa0de-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"626e6c11-d496-4667-a00d-b84fbf1fa0de\") " pod="openstack/manila-scheduler-0" Dec 13 04:03:00 crc kubenswrapper[5070]: I1213 04:03:00.475662 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/709384c1-56bc-451b-939e-910783412f47-scripts\") pod \"manila-share-share1-0\" (UID: \"709384c1-56bc-451b-939e-910783412f47\") " pod="openstack/manila-share-share1-0" Dec 13 04:03:00 crc kubenswrapper[5070]: I1213 04:03:00.475685 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/709384c1-56bc-451b-939e-910783412f47-ceph\") pod \"manila-share-share1-0\" (UID: \"709384c1-56bc-451b-939e-910783412f47\") " pod="openstack/manila-share-share1-0" Dec 13 04:03:00 crc kubenswrapper[5070]: I1213 04:03:00.475722 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/626e6c11-d496-4667-a00d-b84fbf1fa0de-config-data\") pod \"manila-scheduler-0\" (UID: \"626e6c11-d496-4667-a00d-b84fbf1fa0de\") " pod="openstack/manila-scheduler-0" Dec 13 04:03:00 crc kubenswrapper[5070]: I1213 04:03:00.475747 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/709384c1-56bc-451b-939e-910783412f47-combined-ca-bundle\") pod \"manila-share-share1-0\" (UID: \"709384c1-56bc-451b-939e-910783412f47\") " pod="openstack/manila-share-share1-0" Dec 13 04:03:00 crc kubenswrapper[5070]: I1213 04:03:00.475767 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/626e6c11-d496-4667-a00d-b84fbf1fa0de-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"626e6c11-d496-4667-a00d-b84fbf1fa0de\") " pod="openstack/manila-scheduler-0" Dec 13 04:03:00 crc kubenswrapper[5070]: I1213 04:03:00.476695 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/626e6c11-d496-4667-a00d-b84fbf1fa0de-etc-machine-id\") pod 
\"manila-scheduler-0\" (UID: \"626e6c11-d496-4667-a00d-b84fbf1fa0de\") " pod="openstack/manila-scheduler-0" Dec 13 04:03:00 crc kubenswrapper[5070]: I1213 04:03:00.476802 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/709384c1-56bc-451b-939e-910783412f47-var-lib-manila\") pod \"manila-share-share1-0\" (UID: \"709384c1-56bc-451b-939e-910783412f47\") " pod="openstack/manila-share-share1-0" Dec 13 04:03:00 crc kubenswrapper[5070]: I1213 04:03:00.476868 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/709384c1-56bc-451b-939e-910783412f47-config-data\") pod \"manila-share-share1-0\" (UID: \"709384c1-56bc-451b-939e-910783412f47\") " pod="openstack/manila-share-share1-0" Dec 13 04:03:00 crc kubenswrapper[5070]: I1213 04:03:00.476953 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cdhvk\" (UniqueName: \"kubernetes.io/projected/626e6c11-d496-4667-a00d-b84fbf1fa0de-kube-api-access-cdhvk\") pod \"manila-scheduler-0\" (UID: \"626e6c11-d496-4667-a00d-b84fbf1fa0de\") " pod="openstack/manila-scheduler-0" Dec 13 04:03:00 crc kubenswrapper[5070]: I1213 04:03:00.477056 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/626e6c11-d496-4667-a00d-b84fbf1fa0de-scripts\") pod \"manila-scheduler-0\" (UID: \"626e6c11-d496-4667-a00d-b84fbf1fa0de\") " pod="openstack/manila-scheduler-0" Dec 13 04:03:00 crc kubenswrapper[5070]: I1213 04:03:00.477088 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/626e6c11-d496-4667-a00d-b84fbf1fa0de-combined-ca-bundle\") pod \"manila-scheduler-0\" (UID: \"626e6c11-d496-4667-a00d-b84fbf1fa0de\") " pod="openstack/manila-scheduler-0" Dec 13 04:03:00 crc kubenswrapper[5070]: I1213 04:03:00.477220 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2m2bt\" (UniqueName: \"kubernetes.io/projected/709384c1-56bc-451b-939e-910783412f47-kube-api-access-2m2bt\") pod \"manila-share-share1-0\" (UID: \"709384c1-56bc-451b-939e-910783412f47\") " pod="openstack/manila-share-share1-0" Dec 13 04:03:00 crc kubenswrapper[5070]: I1213 04:03:00.489121 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/626e6c11-d496-4667-a00d-b84fbf1fa0de-combined-ca-bundle\") pod \"manila-scheduler-0\" (UID: \"626e6c11-d496-4667-a00d-b84fbf1fa0de\") " pod="openstack/manila-scheduler-0" Dec 13 04:03:00 crc kubenswrapper[5070]: I1213 04:03:00.489192 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/626e6c11-d496-4667-a00d-b84fbf1fa0de-config-data\") pod \"manila-scheduler-0\" (UID: \"626e6c11-d496-4667-a00d-b84fbf1fa0de\") " pod="openstack/manila-scheduler-0" Dec 13 04:03:00 crc kubenswrapper[5070]: I1213 04:03:00.490473 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/626e6c11-d496-4667-a00d-b84fbf1fa0de-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"626e6c11-d496-4667-a00d-b84fbf1fa0de\") " pod="openstack/manila-scheduler-0" Dec 13 04:03:00 crc kubenswrapper[5070]: I1213 04:03:00.495001 
5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/626e6c11-d496-4667-a00d-b84fbf1fa0de-scripts\") pod \"manila-scheduler-0\" (UID: \"626e6c11-d496-4667-a00d-b84fbf1fa0de\") " pod="openstack/manila-scheduler-0" Dec 13 04:03:00 crc kubenswrapper[5070]: I1213 04:03:00.495088 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-share-share1-0"] Dec 13 04:03:00 crc kubenswrapper[5070]: I1213 04:03:00.504850 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cdhvk\" (UniqueName: \"kubernetes.io/projected/626e6c11-d496-4667-a00d-b84fbf1fa0de-kube-api-access-cdhvk\") pod \"manila-scheduler-0\" (UID: \"626e6c11-d496-4667-a00d-b84fbf1fa0de\") " pod="openstack/manila-scheduler-0" Dec 13 04:03:00 crc kubenswrapper[5070]: I1213 04:03:00.579915 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/709384c1-56bc-451b-939e-910783412f47-etc-machine-id\") pod \"manila-share-share1-0\" (UID: \"709384c1-56bc-451b-939e-910783412f47\") " pod="openstack/manila-share-share1-0" Dec 13 04:03:00 crc kubenswrapper[5070]: I1213 04:03:00.580034 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/709384c1-56bc-451b-939e-910783412f47-scripts\") pod \"manila-share-share1-0\" (UID: \"709384c1-56bc-451b-939e-910783412f47\") " pod="openstack/manila-share-share1-0" Dec 13 04:03:00 crc kubenswrapper[5070]: I1213 04:03:00.580054 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/709384c1-56bc-451b-939e-910783412f47-ceph\") pod \"manila-share-share1-0\" (UID: \"709384c1-56bc-451b-939e-910783412f47\") " pod="openstack/manila-share-share1-0" Dec 13 04:03:00 crc kubenswrapper[5070]: I1213 04:03:00.580089 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/709384c1-56bc-451b-939e-910783412f47-combined-ca-bundle\") pod \"manila-share-share1-0\" (UID: \"709384c1-56bc-451b-939e-910783412f47\") " pod="openstack/manila-share-share1-0" Dec 13 04:03:00 crc kubenswrapper[5070]: I1213 04:03:00.580137 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/709384c1-56bc-451b-939e-910783412f47-var-lib-manila\") pod \"manila-share-share1-0\" (UID: \"709384c1-56bc-451b-939e-910783412f47\") " pod="openstack/manila-share-share1-0" Dec 13 04:03:00 crc kubenswrapper[5070]: I1213 04:03:00.580001 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/709384c1-56bc-451b-939e-910783412f47-etc-machine-id\") pod \"manila-share-share1-0\" (UID: \"709384c1-56bc-451b-939e-910783412f47\") " pod="openstack/manila-share-share1-0" Dec 13 04:03:00 crc kubenswrapper[5070]: I1213 04:03:00.580237 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/709384c1-56bc-451b-939e-910783412f47-config-data\") pod \"manila-share-share1-0\" (UID: \"709384c1-56bc-451b-939e-910783412f47\") " pod="openstack/manila-share-share1-0" Dec 13 04:03:00 crc kubenswrapper[5070]: I1213 04:03:00.580947 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-2m2bt\" (UniqueName: \"kubernetes.io/projected/709384c1-56bc-451b-939e-910783412f47-kube-api-access-2m2bt\") pod \"manila-share-share1-0\" (UID: \"709384c1-56bc-451b-939e-910783412f47\") " pod="openstack/manila-share-share1-0" Dec 13 04:03:00 crc kubenswrapper[5070]: I1213 04:03:00.581269 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/709384c1-56bc-451b-939e-910783412f47-config-data-custom\") pod \"manila-share-share1-0\" (UID: \"709384c1-56bc-451b-939e-910783412f47\") " pod="openstack/manila-share-share1-0" Dec 13 04:03:00 crc kubenswrapper[5070]: I1213 04:03:00.581994 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/709384c1-56bc-451b-939e-910783412f47-var-lib-manila\") pod \"manila-share-share1-0\" (UID: \"709384c1-56bc-451b-939e-910783412f47\") " pod="openstack/manila-share-share1-0" Dec 13 04:03:00 crc kubenswrapper[5070]: I1213 04:03:00.584267 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/709384c1-56bc-451b-939e-910783412f47-ceph\") pod \"manila-share-share1-0\" (UID: \"709384c1-56bc-451b-939e-910783412f47\") " pod="openstack/manila-share-share1-0" Dec 13 04:03:00 crc kubenswrapper[5070]: I1213 04:03:00.591435 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/709384c1-56bc-451b-939e-910783412f47-scripts\") pod \"manila-share-share1-0\" (UID: \"709384c1-56bc-451b-939e-910783412f47\") " pod="openstack/manila-share-share1-0" Dec 13 04:03:00 crc kubenswrapper[5070]: I1213 04:03:00.591574 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/709384c1-56bc-451b-939e-910783412f47-config-data-custom\") pod \"manila-share-share1-0\" (UID: \"709384c1-56bc-451b-939e-910783412f47\") " pod="openstack/manila-share-share1-0" Dec 13 04:03:00 crc kubenswrapper[5070]: I1213 04:03:00.591747 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/709384c1-56bc-451b-939e-910783412f47-combined-ca-bundle\") pod \"manila-share-share1-0\" (UID: \"709384c1-56bc-451b-939e-910783412f47\") " pod="openstack/manila-share-share1-0" Dec 13 04:03:00 crc kubenswrapper[5070]: I1213 04:03:00.594318 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/709384c1-56bc-451b-939e-910783412f47-config-data\") pod \"manila-share-share1-0\" (UID: \"709384c1-56bc-451b-939e-910783412f47\") " pod="openstack/manila-share-share1-0" Dec 13 04:03:00 crc kubenswrapper[5070]: I1213 04:03:00.602342 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2m2bt\" (UniqueName: \"kubernetes.io/projected/709384c1-56bc-451b-939e-910783412f47-kube-api-access-2m2bt\") pod \"manila-share-share1-0\" (UID: \"709384c1-56bc-451b-939e-910783412f47\") " pod="openstack/manila-share-share1-0" Dec 13 04:03:00 crc kubenswrapper[5070]: I1213 04:03:00.628137 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-69655fd4bf-4hws8"] Dec 13 04:03:00 crc kubenswrapper[5070]: I1213 04:03:00.629981 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-69655fd4bf-4hws8" Dec 13 04:03:00 crc kubenswrapper[5070]: I1213 04:03:00.662263 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-69655fd4bf-4hws8"] Dec 13 04:03:00 crc kubenswrapper[5070]: I1213 04:03:00.666823 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-scheduler-0" Dec 13 04:03:00 crc kubenswrapper[5070]: I1213 04:03:00.686285 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bwzzm\" (UniqueName: \"kubernetes.io/projected/b619a970-f642-404b-8617-fc0137ac6d56-kube-api-access-bwzzm\") pod \"dnsmasq-dns-69655fd4bf-4hws8\" (UID: \"b619a970-f642-404b-8617-fc0137ac6d56\") " pod="openstack/dnsmasq-dns-69655fd4bf-4hws8" Dec 13 04:03:00 crc kubenswrapper[5070]: I1213 04:03:00.686339 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/b619a970-f642-404b-8617-fc0137ac6d56-openstack-edpm-ipam\") pod \"dnsmasq-dns-69655fd4bf-4hws8\" (UID: \"b619a970-f642-404b-8617-fc0137ac6d56\") " pod="openstack/dnsmasq-dns-69655fd4bf-4hws8" Dec 13 04:03:00 crc kubenswrapper[5070]: I1213 04:03:00.686478 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b619a970-f642-404b-8617-fc0137ac6d56-dns-svc\") pod \"dnsmasq-dns-69655fd4bf-4hws8\" (UID: \"b619a970-f642-404b-8617-fc0137ac6d56\") " pod="openstack/dnsmasq-dns-69655fd4bf-4hws8" Dec 13 04:03:00 crc kubenswrapper[5070]: I1213 04:03:00.686513 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b619a970-f642-404b-8617-fc0137ac6d56-ovsdbserver-nb\") pod \"dnsmasq-dns-69655fd4bf-4hws8\" (UID: \"b619a970-f642-404b-8617-fc0137ac6d56\") " pod="openstack/dnsmasq-dns-69655fd4bf-4hws8" Dec 13 04:03:00 crc kubenswrapper[5070]: I1213 04:03:00.686545 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b619a970-f642-404b-8617-fc0137ac6d56-config\") pod \"dnsmasq-dns-69655fd4bf-4hws8\" (UID: \"b619a970-f642-404b-8617-fc0137ac6d56\") " pod="openstack/dnsmasq-dns-69655fd4bf-4hws8" Dec 13 04:03:00 crc kubenswrapper[5070]: I1213 04:03:00.686576 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b619a970-f642-404b-8617-fc0137ac6d56-ovsdbserver-sb\") pod \"dnsmasq-dns-69655fd4bf-4hws8\" (UID: \"b619a970-f642-404b-8617-fc0137ac6d56\") " pod="openstack/dnsmasq-dns-69655fd4bf-4hws8" Dec 13 04:03:00 crc kubenswrapper[5070]: I1213 04:03:00.687513 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-api-0"] Dec 13 04:03:00 crc kubenswrapper[5070]: I1213 04:03:00.689513 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-api-0" Dec 13 04:03:00 crc kubenswrapper[5070]: I1213 04:03:00.693848 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-api-config-data" Dec 13 04:03:00 crc kubenswrapper[5070]: I1213 04:03:00.737025 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-api-0"] Dec 13 04:03:00 crc kubenswrapper[5070]: I1213 04:03:00.780837 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-share-share1-0" Dec 13 04:03:00 crc kubenswrapper[5070]: I1213 04:03:00.788255 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f0d9570f-77f6-4704-9de6-170c794ebe66-config-data-custom\") pod \"manila-api-0\" (UID: \"f0d9570f-77f6-4704-9de6-170c794ebe66\") " pod="openstack/manila-api-0" Dec 13 04:03:00 crc kubenswrapper[5070]: I1213 04:03:00.788306 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b619a970-f642-404b-8617-fc0137ac6d56-config\") pod \"dnsmasq-dns-69655fd4bf-4hws8\" (UID: \"b619a970-f642-404b-8617-fc0137ac6d56\") " pod="openstack/dnsmasq-dns-69655fd4bf-4hws8" Dec 13 04:03:00 crc kubenswrapper[5070]: I1213 04:03:00.788339 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b619a970-f642-404b-8617-fc0137ac6d56-ovsdbserver-sb\") pod \"dnsmasq-dns-69655fd4bf-4hws8\" (UID: \"b619a970-f642-404b-8617-fc0137ac6d56\") " pod="openstack/dnsmasq-dns-69655fd4bf-4hws8" Dec 13 04:03:00 crc kubenswrapper[5070]: I1213 04:03:00.788394 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f0d9570f-77f6-4704-9de6-170c794ebe66-config-data\") pod \"manila-api-0\" (UID: \"f0d9570f-77f6-4704-9de6-170c794ebe66\") " pod="openstack/manila-api-0" Dec 13 04:03:00 crc kubenswrapper[5070]: I1213 04:03:00.788427 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c7cqk\" (UniqueName: \"kubernetes.io/projected/f0d9570f-77f6-4704-9de6-170c794ebe66-kube-api-access-c7cqk\") pod \"manila-api-0\" (UID: \"f0d9570f-77f6-4704-9de6-170c794ebe66\") " pod="openstack/manila-api-0" Dec 13 04:03:00 crc kubenswrapper[5070]: I1213 04:03:00.788466 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bwzzm\" (UniqueName: \"kubernetes.io/projected/b619a970-f642-404b-8617-fc0137ac6d56-kube-api-access-bwzzm\") pod \"dnsmasq-dns-69655fd4bf-4hws8\" (UID: \"b619a970-f642-404b-8617-fc0137ac6d56\") " pod="openstack/dnsmasq-dns-69655fd4bf-4hws8" Dec 13 04:03:00 crc kubenswrapper[5070]: I1213 04:03:00.788481 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/b619a970-f642-404b-8617-fc0137ac6d56-openstack-edpm-ipam\") pod \"dnsmasq-dns-69655fd4bf-4hws8\" (UID: \"b619a970-f642-404b-8617-fc0137ac6d56\") " pod="openstack/dnsmasq-dns-69655fd4bf-4hws8" Dec 13 04:03:00 crc kubenswrapper[5070]: I1213 04:03:00.788528 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f0d9570f-77f6-4704-9de6-170c794ebe66-combined-ca-bundle\") pod \"manila-api-0\" 
(UID: \"f0d9570f-77f6-4704-9de6-170c794ebe66\") " pod="openstack/manila-api-0" Dec 13 04:03:00 crc kubenswrapper[5070]: I1213 04:03:00.788571 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f0d9570f-77f6-4704-9de6-170c794ebe66-logs\") pod \"manila-api-0\" (UID: \"f0d9570f-77f6-4704-9de6-170c794ebe66\") " pod="openstack/manila-api-0" Dec 13 04:03:00 crc kubenswrapper[5070]: I1213 04:03:00.788613 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f0d9570f-77f6-4704-9de6-170c794ebe66-etc-machine-id\") pod \"manila-api-0\" (UID: \"f0d9570f-77f6-4704-9de6-170c794ebe66\") " pod="openstack/manila-api-0" Dec 13 04:03:00 crc kubenswrapper[5070]: I1213 04:03:00.788627 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f0d9570f-77f6-4704-9de6-170c794ebe66-scripts\") pod \"manila-api-0\" (UID: \"f0d9570f-77f6-4704-9de6-170c794ebe66\") " pod="openstack/manila-api-0" Dec 13 04:03:00 crc kubenswrapper[5070]: I1213 04:03:00.788646 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b619a970-f642-404b-8617-fc0137ac6d56-dns-svc\") pod \"dnsmasq-dns-69655fd4bf-4hws8\" (UID: \"b619a970-f642-404b-8617-fc0137ac6d56\") " pod="openstack/dnsmasq-dns-69655fd4bf-4hws8" Dec 13 04:03:00 crc kubenswrapper[5070]: I1213 04:03:00.788666 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b619a970-f642-404b-8617-fc0137ac6d56-ovsdbserver-nb\") pod \"dnsmasq-dns-69655fd4bf-4hws8\" (UID: \"b619a970-f642-404b-8617-fc0137ac6d56\") " pod="openstack/dnsmasq-dns-69655fd4bf-4hws8" Dec 13 04:03:00 crc kubenswrapper[5070]: I1213 04:03:00.789565 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b619a970-f642-404b-8617-fc0137ac6d56-ovsdbserver-nb\") pod \"dnsmasq-dns-69655fd4bf-4hws8\" (UID: \"b619a970-f642-404b-8617-fc0137ac6d56\") " pod="openstack/dnsmasq-dns-69655fd4bf-4hws8" Dec 13 04:03:00 crc kubenswrapper[5070]: I1213 04:03:00.790145 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b619a970-f642-404b-8617-fc0137ac6d56-config\") pod \"dnsmasq-dns-69655fd4bf-4hws8\" (UID: \"b619a970-f642-404b-8617-fc0137ac6d56\") " pod="openstack/dnsmasq-dns-69655fd4bf-4hws8" Dec 13 04:03:00 crc kubenswrapper[5070]: I1213 04:03:00.790273 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b619a970-f642-404b-8617-fc0137ac6d56-ovsdbserver-sb\") pod \"dnsmasq-dns-69655fd4bf-4hws8\" (UID: \"b619a970-f642-404b-8617-fc0137ac6d56\") " pod="openstack/dnsmasq-dns-69655fd4bf-4hws8" Dec 13 04:03:00 crc kubenswrapper[5070]: I1213 04:03:00.790344 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b619a970-f642-404b-8617-fc0137ac6d56-dns-svc\") pod \"dnsmasq-dns-69655fd4bf-4hws8\" (UID: \"b619a970-f642-404b-8617-fc0137ac6d56\") " pod="openstack/dnsmasq-dns-69655fd4bf-4hws8" Dec 13 04:03:00 crc kubenswrapper[5070]: I1213 04:03:00.790824 5070 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/b619a970-f642-404b-8617-fc0137ac6d56-openstack-edpm-ipam\") pod \"dnsmasq-dns-69655fd4bf-4hws8\" (UID: \"b619a970-f642-404b-8617-fc0137ac6d56\") " pod="openstack/dnsmasq-dns-69655fd4bf-4hws8" Dec 13 04:03:00 crc kubenswrapper[5070]: I1213 04:03:00.808731 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bwzzm\" (UniqueName: \"kubernetes.io/projected/b619a970-f642-404b-8617-fc0137ac6d56-kube-api-access-bwzzm\") pod \"dnsmasq-dns-69655fd4bf-4hws8\" (UID: \"b619a970-f642-404b-8617-fc0137ac6d56\") " pod="openstack/dnsmasq-dns-69655fd4bf-4hws8" Dec 13 04:03:00 crc kubenswrapper[5070]: I1213 04:03:00.891667 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f0d9570f-77f6-4704-9de6-170c794ebe66-etc-machine-id\") pod \"manila-api-0\" (UID: \"f0d9570f-77f6-4704-9de6-170c794ebe66\") " pod="openstack/manila-api-0" Dec 13 04:03:00 crc kubenswrapper[5070]: I1213 04:03:00.892114 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f0d9570f-77f6-4704-9de6-170c794ebe66-scripts\") pod \"manila-api-0\" (UID: \"f0d9570f-77f6-4704-9de6-170c794ebe66\") " pod="openstack/manila-api-0" Dec 13 04:03:00 crc kubenswrapper[5070]: I1213 04:03:00.894146 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f0d9570f-77f6-4704-9de6-170c794ebe66-config-data-custom\") pod \"manila-api-0\" (UID: \"f0d9570f-77f6-4704-9de6-170c794ebe66\") " pod="openstack/manila-api-0" Dec 13 04:03:00 crc kubenswrapper[5070]: I1213 04:03:00.894304 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f0d9570f-77f6-4704-9de6-170c794ebe66-config-data\") pod \"manila-api-0\" (UID: \"f0d9570f-77f6-4704-9de6-170c794ebe66\") " pod="openstack/manila-api-0" Dec 13 04:03:00 crc kubenswrapper[5070]: I1213 04:03:00.894362 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c7cqk\" (UniqueName: \"kubernetes.io/projected/f0d9570f-77f6-4704-9de6-170c794ebe66-kube-api-access-c7cqk\") pod \"manila-api-0\" (UID: \"f0d9570f-77f6-4704-9de6-170c794ebe66\") " pod="openstack/manila-api-0" Dec 13 04:03:00 crc kubenswrapper[5070]: I1213 04:03:00.894541 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f0d9570f-77f6-4704-9de6-170c794ebe66-combined-ca-bundle\") pod \"manila-api-0\" (UID: \"f0d9570f-77f6-4704-9de6-170c794ebe66\") " pod="openstack/manila-api-0" Dec 13 04:03:00 crc kubenswrapper[5070]: I1213 04:03:00.894634 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f0d9570f-77f6-4704-9de6-170c794ebe66-logs\") pod \"manila-api-0\" (UID: \"f0d9570f-77f6-4704-9de6-170c794ebe66\") " pod="openstack/manila-api-0" Dec 13 04:03:00 crc kubenswrapper[5070]: I1213 04:03:00.895494 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f0d9570f-77f6-4704-9de6-170c794ebe66-logs\") pod \"manila-api-0\" (UID: \"f0d9570f-77f6-4704-9de6-170c794ebe66\") " pod="openstack/manila-api-0" Dec 13 04:03:00 crc kubenswrapper[5070]: I1213 04:03:00.895922 5070 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f0d9570f-77f6-4704-9de6-170c794ebe66-etc-machine-id\") pod \"manila-api-0\" (UID: \"f0d9570f-77f6-4704-9de6-170c794ebe66\") " pod="openstack/manila-api-0" Dec 13 04:03:00 crc kubenswrapper[5070]: I1213 04:03:00.900120 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f0d9570f-77f6-4704-9de6-170c794ebe66-combined-ca-bundle\") pod \"manila-api-0\" (UID: \"f0d9570f-77f6-4704-9de6-170c794ebe66\") " pod="openstack/manila-api-0" Dec 13 04:03:00 crc kubenswrapper[5070]: I1213 04:03:00.901305 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f0d9570f-77f6-4704-9de6-170c794ebe66-config-data\") pod \"manila-api-0\" (UID: \"f0d9570f-77f6-4704-9de6-170c794ebe66\") " pod="openstack/manila-api-0" Dec 13 04:03:00 crc kubenswrapper[5070]: I1213 04:03:00.902602 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f0d9570f-77f6-4704-9de6-170c794ebe66-scripts\") pod \"manila-api-0\" (UID: \"f0d9570f-77f6-4704-9de6-170c794ebe66\") " pod="openstack/manila-api-0" Dec 13 04:03:00 crc kubenswrapper[5070]: I1213 04:03:00.906488 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f0d9570f-77f6-4704-9de6-170c794ebe66-config-data-custom\") pod \"manila-api-0\" (UID: \"f0d9570f-77f6-4704-9de6-170c794ebe66\") " pod="openstack/manila-api-0" Dec 13 04:03:00 crc kubenswrapper[5070]: I1213 04:03:00.924036 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c7cqk\" (UniqueName: \"kubernetes.io/projected/f0d9570f-77f6-4704-9de6-170c794ebe66-kube-api-access-c7cqk\") pod \"manila-api-0\" (UID: \"f0d9570f-77f6-4704-9de6-170c794ebe66\") " pod="openstack/manila-api-0" Dec 13 04:03:00 crc kubenswrapper[5070]: I1213 04:03:00.980965 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-69655fd4bf-4hws8" Dec 13 04:03:01 crc kubenswrapper[5070]: I1213 04:03:01.093917 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-api-0" Dec 13 04:03:01 crc kubenswrapper[5070]: I1213 04:03:01.237249 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-scheduler-0"] Dec 13 04:03:01 crc kubenswrapper[5070]: W1213 04:03:01.472843 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb619a970_f642_404b_8617_fc0137ac6d56.slice/crio-cd74ddb3658e584dccf8ee7369d58f92b4b4be9e21b30b2aaed094a253dca5d1 WatchSource:0}: Error finding container cd74ddb3658e584dccf8ee7369d58f92b4b4be9e21b30b2aaed094a253dca5d1: Status 404 returned error can't find the container with id cd74ddb3658e584dccf8ee7369d58f92b4b4be9e21b30b2aaed094a253dca5d1 Dec 13 04:03:01 crc kubenswrapper[5070]: I1213 04:03:01.481812 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-69655fd4bf-4hws8"] Dec 13 04:03:01 crc kubenswrapper[5070]: I1213 04:03:01.500146 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-share-share1-0"] Dec 13 04:03:01 crc kubenswrapper[5070]: W1213 04:03:01.540148 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod709384c1_56bc_451b_939e_910783412f47.slice/crio-b905105736269a59af08801fc47209d83fc567d229631cec5f5b6ceb4c6574b8 WatchSource:0}: Error finding container b905105736269a59af08801fc47209d83fc567d229631cec5f5b6ceb4c6574b8: Status 404 returned error can't find the container with id b905105736269a59af08801fc47209d83fc567d229631cec5f5b6ceb4c6574b8 Dec 13 04:03:02 crc kubenswrapper[5070]: I1213 04:03:02.280030 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"709384c1-56bc-451b-939e-910783412f47","Type":"ContainerStarted","Data":"b905105736269a59af08801fc47209d83fc567d229631cec5f5b6ceb4c6574b8"} Dec 13 04:03:02 crc kubenswrapper[5070]: I1213 04:03:02.282721 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-69655fd4bf-4hws8" event={"ID":"b619a970-f642-404b-8617-fc0137ac6d56","Type":"ContainerStarted","Data":"cd74ddb3658e584dccf8ee7369d58f92b4b4be9e21b30b2aaed094a253dca5d1"} Dec 13 04:03:02 crc kubenswrapper[5070]: I1213 04:03:02.284306 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"626e6c11-d496-4667-a00d-b84fbf1fa0de","Type":"ContainerStarted","Data":"09c312ab4a8b3d99ad28eafc973531d44919522a4c41604e9472b9a0a32f932f"} Dec 13 04:03:02 crc kubenswrapper[5070]: I1213 04:03:02.403601 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-api-0"] Dec 13 04:03:02 crc kubenswrapper[5070]: W1213 04:03:02.604491 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf0d9570f_77f6_4704_9de6_170c794ebe66.slice/crio-80e20c904f72d6e7cb8ac054f0594891a02226d53efdebc9f0aac1883b5852f3 WatchSource:0}: Error finding container 80e20c904f72d6e7cb8ac054f0594891a02226d53efdebc9f0aac1883b5852f3: Status 404 returned error can't find the container with id 80e20c904f72d6e7cb8ac054f0594891a02226d53efdebc9f0aac1883b5852f3 Dec 13 04:03:03 crc kubenswrapper[5070]: I1213 04:03:03.294976 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"f0d9570f-77f6-4704-9de6-170c794ebe66","Type":"ContainerStarted","Data":"4eaec48b8f58af63835e697261e9729140d85cb757ebec84f31e233e59542860"} Dec 13 04:03:03 crc 
kubenswrapper[5070]: I1213 04:03:03.295303 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"f0d9570f-77f6-4704-9de6-170c794ebe66","Type":"ContainerStarted","Data":"80e20c904f72d6e7cb8ac054f0594891a02226d53efdebc9f0aac1883b5852f3"} Dec 13 04:03:03 crc kubenswrapper[5070]: I1213 04:03:03.305166 5070 generic.go:334] "Generic (PLEG): container finished" podID="b619a970-f642-404b-8617-fc0137ac6d56" containerID="48cf690fc5b73582f5ab822ce7a09aa9c33fe8ca00c15c0de70e2657fbafb014" exitCode=0 Dec 13 04:03:03 crc kubenswrapper[5070]: I1213 04:03:03.305206 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-69655fd4bf-4hws8" event={"ID":"b619a970-f642-404b-8617-fc0137ac6d56","Type":"ContainerDied","Data":"48cf690fc5b73582f5ab822ce7a09aa9c33fe8ca00c15c0de70e2657fbafb014"} Dec 13 04:03:05 crc kubenswrapper[5070]: I1213 04:03:04.373778 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"f0d9570f-77f6-4704-9de6-170c794ebe66","Type":"ContainerStarted","Data":"b16592ed3268d4ced25e862ee2b84e187d373c559daf7d7c3ba107c79330b8a0"} Dec 13 04:03:05 crc kubenswrapper[5070]: I1213 04:03:04.375879 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/manila-api-0" Dec 13 04:03:05 crc kubenswrapper[5070]: I1213 04:03:04.425916 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-784865f99f-5r8dj" Dec 13 04:03:05 crc kubenswrapper[5070]: I1213 04:03:04.431735 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-api-0"] Dec 13 04:03:05 crc kubenswrapper[5070]: I1213 04:03:04.450935 5070 generic.go:334] "Generic (PLEG): container finished" podID="904c7826-4376-4e6d-a520-a12e30a3495d" containerID="04386b664fd4f6609da560517be9f1965e9ef293cdc93642c402a0306506a65e" exitCode=137 Dec 13 04:03:05 crc kubenswrapper[5070]: I1213 04:03:04.450960 5070 generic.go:334] "Generic (PLEG): container finished" podID="904c7826-4376-4e6d-a520-a12e30a3495d" containerID="2c762c0a8d94ff654466d835ebe25ead94a96ff34f5a32b4f4aff15c04577e92" exitCode=137 Dec 13 04:03:05 crc kubenswrapper[5070]: I1213 04:03:04.451010 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-784865f99f-5r8dj" event={"ID":"904c7826-4376-4e6d-a520-a12e30a3495d","Type":"ContainerDied","Data":"04386b664fd4f6609da560517be9f1965e9ef293cdc93642c402a0306506a65e"} Dec 13 04:03:05 crc kubenswrapper[5070]: I1213 04:03:04.451046 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-784865f99f-5r8dj" event={"ID":"904c7826-4376-4e6d-a520-a12e30a3495d","Type":"ContainerDied","Data":"2c762c0a8d94ff654466d835ebe25ead94a96ff34f5a32b4f4aff15c04577e92"} Dec 13 04:03:05 crc kubenswrapper[5070]: I1213 04:03:04.451067 5070 scope.go:117] "RemoveContainer" containerID="04386b664fd4f6609da560517be9f1965e9ef293cdc93642c402a0306506a65e" Dec 13 04:03:05 crc kubenswrapper[5070]: I1213 04:03:04.456328 5070 generic.go:334] "Generic (PLEG): container finished" podID="b6951934-d07e-4e5a-930c-779502792de9" containerID="dbf836f7f9c91df84a121a6c3c2296591015d0906c17afc70f59dfcb146ac8f7" exitCode=137 Dec 13 04:03:05 crc kubenswrapper[5070]: I1213 04:03:04.456356 5070 generic.go:334] "Generic (PLEG): container finished" podID="b6951934-d07e-4e5a-930c-779502792de9" containerID="e518416bd5d27587ddd8e5b118874c57f96850abc00f015a7407c6cc22990f6c" exitCode=137 Dec 13 04:03:05 crc kubenswrapper[5070]: I1213 04:03:04.456406 5070 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openstack/horizon-64b5b8c565-xr2cz" event={"ID":"b6951934-d07e-4e5a-930c-779502792de9","Type":"ContainerDied","Data":"dbf836f7f9c91df84a121a6c3c2296591015d0906c17afc70f59dfcb146ac8f7"} Dec 13 04:03:05 crc kubenswrapper[5070]: I1213 04:03:04.456435 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-64b5b8c565-xr2cz" event={"ID":"b6951934-d07e-4e5a-930c-779502792de9","Type":"ContainerDied","Data":"e518416bd5d27587ddd8e5b118874c57f96850abc00f015a7407c6cc22990f6c"} Dec 13 04:03:05 crc kubenswrapper[5070]: I1213 04:03:04.460293 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-api-0" podStartSLOduration=4.46027675 podStartE2EDuration="4.46027675s" podCreationTimestamp="2025-12-13 04:03:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 04:03:04.442323691 +0000 UTC m=+3076.678167227" watchObservedRunningTime="2025-12-13 04:03:04.46027675 +0000 UTC m=+3076.696120296" Dec 13 04:03:05 crc kubenswrapper[5070]: I1213 04:03:04.475681 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-69655fd4bf-4hws8" event={"ID":"b619a970-f642-404b-8617-fc0137ac6d56","Type":"ContainerStarted","Data":"6bee9f3fd8e02785537a4490e8a60756bfbf4ca38e387a98bad02f4f0852c33b"} Dec 13 04:03:05 crc kubenswrapper[5070]: I1213 04:03:04.476810 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-69655fd4bf-4hws8" Dec 13 04:03:05 crc kubenswrapper[5070]: I1213 04:03:04.486745 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"626e6c11-d496-4667-a00d-b84fbf1fa0de","Type":"ContainerStarted","Data":"e455d1047ce275af0e3f76ccfa09976c6827783456f195aabc937fe1c46bfbbf"} Dec 13 04:03:05 crc kubenswrapper[5070]: I1213 04:03:04.486798 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"626e6c11-d496-4667-a00d-b84fbf1fa0de","Type":"ContainerStarted","Data":"58d5f36aed0a9999c2fd56640fa60ab1358bded52e6be73db27baad2f77aae5f"} Dec 13 04:03:05 crc kubenswrapper[5070]: I1213 04:03:04.497544 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-64b5b8c565-xr2cz" Dec 13 04:03:05 crc kubenswrapper[5070]: I1213 04:03:04.519283 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-24mcs\" (UniqueName: \"kubernetes.io/projected/904c7826-4376-4e6d-a520-a12e30a3495d-kube-api-access-24mcs\") pod \"904c7826-4376-4e6d-a520-a12e30a3495d\" (UID: \"904c7826-4376-4e6d-a520-a12e30a3495d\") " Dec 13 04:03:05 crc kubenswrapper[5070]: I1213 04:03:04.519508 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/904c7826-4376-4e6d-a520-a12e30a3495d-config-data\") pod \"904c7826-4376-4e6d-a520-a12e30a3495d\" (UID: \"904c7826-4376-4e6d-a520-a12e30a3495d\") " Dec 13 04:03:05 crc kubenswrapper[5070]: I1213 04:03:04.519579 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/904c7826-4376-4e6d-a520-a12e30a3495d-scripts\") pod \"904c7826-4376-4e6d-a520-a12e30a3495d\" (UID: \"904c7826-4376-4e6d-a520-a12e30a3495d\") " Dec 13 04:03:05 crc kubenswrapper[5070]: I1213 04:03:04.519615 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/904c7826-4376-4e6d-a520-a12e30a3495d-logs\") pod \"904c7826-4376-4e6d-a520-a12e30a3495d\" (UID: \"904c7826-4376-4e6d-a520-a12e30a3495d\") " Dec 13 04:03:05 crc kubenswrapper[5070]: I1213 04:03:04.519652 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/904c7826-4376-4e6d-a520-a12e30a3495d-horizon-secret-key\") pod \"904c7826-4376-4e6d-a520-a12e30a3495d\" (UID: \"904c7826-4376-4e6d-a520-a12e30a3495d\") " Dec 13 04:03:05 crc kubenswrapper[5070]: I1213 04:03:04.526501 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-69655fd4bf-4hws8" podStartSLOduration=4.526477388 podStartE2EDuration="4.526477388s" podCreationTimestamp="2025-12-13 04:03:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 04:03:04.505768882 +0000 UTC m=+3076.741612428" watchObservedRunningTime="2025-12-13 04:03:04.526477388 +0000 UTC m=+3076.762320944" Dec 13 04:03:05 crc kubenswrapper[5070]: I1213 04:03:04.529627 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/904c7826-4376-4e6d-a520-a12e30a3495d-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "904c7826-4376-4e6d-a520-a12e30a3495d" (UID: "904c7826-4376-4e6d-a520-a12e30a3495d"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 04:03:05 crc kubenswrapper[5070]: I1213 04:03:04.529945 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/904c7826-4376-4e6d-a520-a12e30a3495d-logs" (OuterVolumeSpecName: "logs") pod "904c7826-4376-4e6d-a520-a12e30a3495d" (UID: "904c7826-4376-4e6d-a520-a12e30a3495d"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 04:03:05 crc kubenswrapper[5070]: I1213 04:03:04.563351 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/904c7826-4376-4e6d-a520-a12e30a3495d-kube-api-access-24mcs" (OuterVolumeSpecName: "kube-api-access-24mcs") pod "904c7826-4376-4e6d-a520-a12e30a3495d" (UID: "904c7826-4376-4e6d-a520-a12e30a3495d"). InnerVolumeSpecName "kube-api-access-24mcs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 04:03:05 crc kubenswrapper[5070]: I1213 04:03:04.571913 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/904c7826-4376-4e6d-a520-a12e30a3495d-config-data" (OuterVolumeSpecName: "config-data") pod "904c7826-4376-4e6d-a520-a12e30a3495d" (UID: "904c7826-4376-4e6d-a520-a12e30a3495d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 04:03:05 crc kubenswrapper[5070]: I1213 04:03:04.582239 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/904c7826-4376-4e6d-a520-a12e30a3495d-scripts" (OuterVolumeSpecName: "scripts") pod "904c7826-4376-4e6d-a520-a12e30a3495d" (UID: "904c7826-4376-4e6d-a520-a12e30a3495d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 04:03:05 crc kubenswrapper[5070]: I1213 04:03:04.594960 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-scheduler-0" podStartSLOduration=2.986336554 podStartE2EDuration="4.594938536s" podCreationTimestamp="2025-12-13 04:03:00 +0000 UTC" firstStartedPulling="2025-12-13 04:03:01.252370379 +0000 UTC m=+3073.488213925" lastFinishedPulling="2025-12-13 04:03:02.860972361 +0000 UTC m=+3075.096815907" observedRunningTime="2025-12-13 04:03:04.560044874 +0000 UTC m=+3076.795888420" watchObservedRunningTime="2025-12-13 04:03:04.594938536 +0000 UTC m=+3076.830782082" Dec 13 04:03:05 crc kubenswrapper[5070]: I1213 04:03:04.623318 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b6951934-d07e-4e5a-930c-779502792de9-config-data\") pod \"b6951934-d07e-4e5a-930c-779502792de9\" (UID: \"b6951934-d07e-4e5a-930c-779502792de9\") " Dec 13 04:03:05 crc kubenswrapper[5070]: I1213 04:03:04.624745 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/b6951934-d07e-4e5a-930c-779502792de9-horizon-secret-key\") pod \"b6951934-d07e-4e5a-930c-779502792de9\" (UID: \"b6951934-d07e-4e5a-930c-779502792de9\") " Dec 13 04:03:05 crc kubenswrapper[5070]: I1213 04:03:04.624797 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b6951934-d07e-4e5a-930c-779502792de9-logs\") pod \"b6951934-d07e-4e5a-930c-779502792de9\" (UID: \"b6951934-d07e-4e5a-930c-779502792de9\") " Dec 13 04:03:05 crc kubenswrapper[5070]: I1213 04:03:04.630285 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6951934-d07e-4e5a-930c-779502792de9-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "b6951934-d07e-4e5a-930c-779502792de9" (UID: "b6951934-d07e-4e5a-930c-779502792de9"). InnerVolumeSpecName "horizon-secret-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 04:03:05 crc kubenswrapper[5070]: I1213 04:03:04.630742 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b6951934-d07e-4e5a-930c-779502792de9-logs" (OuterVolumeSpecName: "logs") pod "b6951934-d07e-4e5a-930c-779502792de9" (UID: "b6951934-d07e-4e5a-930c-779502792de9"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 04:03:05 crc kubenswrapper[5070]: I1213 04:03:04.634920 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b6951934-d07e-4e5a-930c-779502792de9-scripts\") pod \"b6951934-d07e-4e5a-930c-779502792de9\" (UID: \"b6951934-d07e-4e5a-930c-779502792de9\") " Dec 13 04:03:05 crc kubenswrapper[5070]: I1213 04:03:04.635080 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pb68n\" (UniqueName: \"kubernetes.io/projected/b6951934-d07e-4e5a-930c-779502792de9-kube-api-access-pb68n\") pod \"b6951934-d07e-4e5a-930c-779502792de9\" (UID: \"b6951934-d07e-4e5a-930c-779502792de9\") " Dec 13 04:03:05 crc kubenswrapper[5070]: I1213 04:03:04.636139 5070 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/904c7826-4376-4e6d-a520-a12e30a3495d-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Dec 13 04:03:05 crc kubenswrapper[5070]: I1213 04:03:04.636159 5070 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/b6951934-d07e-4e5a-930c-779502792de9-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Dec 13 04:03:05 crc kubenswrapper[5070]: I1213 04:03:04.636172 5070 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b6951934-d07e-4e5a-930c-779502792de9-logs\") on node \"crc\" DevicePath \"\"" Dec 13 04:03:05 crc kubenswrapper[5070]: I1213 04:03:04.636184 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-24mcs\" (UniqueName: \"kubernetes.io/projected/904c7826-4376-4e6d-a520-a12e30a3495d-kube-api-access-24mcs\") on node \"crc\" DevicePath \"\"" Dec 13 04:03:05 crc kubenswrapper[5070]: I1213 04:03:04.636197 5070 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/904c7826-4376-4e6d-a520-a12e30a3495d-config-data\") on node \"crc\" DevicePath \"\"" Dec 13 04:03:05 crc kubenswrapper[5070]: I1213 04:03:04.636207 5070 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/904c7826-4376-4e6d-a520-a12e30a3495d-scripts\") on node \"crc\" DevicePath \"\"" Dec 13 04:03:05 crc kubenswrapper[5070]: I1213 04:03:04.636217 5070 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/904c7826-4376-4e6d-a520-a12e30a3495d-logs\") on node \"crc\" DevicePath \"\"" Dec 13 04:03:05 crc kubenswrapper[5070]: I1213 04:03:04.646892 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6951934-d07e-4e5a-930c-779502792de9-kube-api-access-pb68n" (OuterVolumeSpecName: "kube-api-access-pb68n") pod "b6951934-d07e-4e5a-930c-779502792de9" (UID: "b6951934-d07e-4e5a-930c-779502792de9"). InnerVolumeSpecName "kube-api-access-pb68n". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 04:03:05 crc kubenswrapper[5070]: I1213 04:03:04.683265 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6951934-d07e-4e5a-930c-779502792de9-scripts" (OuterVolumeSpecName: "scripts") pod "b6951934-d07e-4e5a-930c-779502792de9" (UID: "b6951934-d07e-4e5a-930c-779502792de9"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 04:03:05 crc kubenswrapper[5070]: I1213 04:03:04.691908 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6951934-d07e-4e5a-930c-779502792de9-config-data" (OuterVolumeSpecName: "config-data") pod "b6951934-d07e-4e5a-930c-779502792de9" (UID: "b6951934-d07e-4e5a-930c-779502792de9"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 04:03:05 crc kubenswrapper[5070]: I1213 04:03:04.748871 5070 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b6951934-d07e-4e5a-930c-779502792de9-scripts\") on node \"crc\" DevicePath \"\"" Dec 13 04:03:05 crc kubenswrapper[5070]: I1213 04:03:04.748901 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pb68n\" (UniqueName: \"kubernetes.io/projected/b6951934-d07e-4e5a-930c-779502792de9-kube-api-access-pb68n\") on node \"crc\" DevicePath \"\"" Dec 13 04:03:05 crc kubenswrapper[5070]: I1213 04:03:04.748911 5070 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b6951934-d07e-4e5a-930c-779502792de9-config-data\") on node \"crc\" DevicePath \"\"" Dec 13 04:03:05 crc kubenswrapper[5070]: I1213 04:03:04.775097 5070 scope.go:117] "RemoveContainer" containerID="2c762c0a8d94ff654466d835ebe25ead94a96ff34f5a32b4f4aff15c04577e92" Dec 13 04:03:05 crc kubenswrapper[5070]: I1213 04:03:04.800157 5070 scope.go:117] "RemoveContainer" containerID="04386b664fd4f6609da560517be9f1965e9ef293cdc93642c402a0306506a65e" Dec 13 04:03:05 crc kubenswrapper[5070]: E1213 04:03:04.800836 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"04386b664fd4f6609da560517be9f1965e9ef293cdc93642c402a0306506a65e\": container with ID starting with 04386b664fd4f6609da560517be9f1965e9ef293cdc93642c402a0306506a65e not found: ID does not exist" containerID="04386b664fd4f6609da560517be9f1965e9ef293cdc93642c402a0306506a65e" Dec 13 04:03:05 crc kubenswrapper[5070]: I1213 04:03:04.800884 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"04386b664fd4f6609da560517be9f1965e9ef293cdc93642c402a0306506a65e"} err="failed to get container status \"04386b664fd4f6609da560517be9f1965e9ef293cdc93642c402a0306506a65e\": rpc error: code = NotFound desc = could not find container \"04386b664fd4f6609da560517be9f1965e9ef293cdc93642c402a0306506a65e\": container with ID starting with 04386b664fd4f6609da560517be9f1965e9ef293cdc93642c402a0306506a65e not found: ID does not exist" Dec 13 04:03:05 crc kubenswrapper[5070]: I1213 04:03:04.800919 5070 scope.go:117] "RemoveContainer" containerID="2c762c0a8d94ff654466d835ebe25ead94a96ff34f5a32b4f4aff15c04577e92" Dec 13 04:03:05 crc kubenswrapper[5070]: E1213 04:03:04.801990 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"2c762c0a8d94ff654466d835ebe25ead94a96ff34f5a32b4f4aff15c04577e92\": container with ID starting with 2c762c0a8d94ff654466d835ebe25ead94a96ff34f5a32b4f4aff15c04577e92 not found: ID does not exist" containerID="2c762c0a8d94ff654466d835ebe25ead94a96ff34f5a32b4f4aff15c04577e92" Dec 13 04:03:05 crc kubenswrapper[5070]: I1213 04:03:04.802031 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2c762c0a8d94ff654466d835ebe25ead94a96ff34f5a32b4f4aff15c04577e92"} err="failed to get container status \"2c762c0a8d94ff654466d835ebe25ead94a96ff34f5a32b4f4aff15c04577e92\": rpc error: code = NotFound desc = could not find container \"2c762c0a8d94ff654466d835ebe25ead94a96ff34f5a32b4f4aff15c04577e92\": container with ID starting with 2c762c0a8d94ff654466d835ebe25ead94a96ff34f5a32b4f4aff15c04577e92 not found: ID does not exist" Dec 13 04:03:05 crc kubenswrapper[5070]: I1213 04:03:05.514529 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-64b5b8c565-xr2cz" event={"ID":"b6951934-d07e-4e5a-930c-779502792de9","Type":"ContainerDied","Data":"43bb0bf2405a3d6fc96629b7d8f0949af48b3dd6ff260f238a9a64c178f9513a"} Dec 13 04:03:05 crc kubenswrapper[5070]: I1213 04:03:05.515028 5070 scope.go:117] "RemoveContainer" containerID="dbf836f7f9c91df84a121a6c3c2296591015d0906c17afc70f59dfcb146ac8f7" Dec 13 04:03:05 crc kubenswrapper[5070]: I1213 04:03:05.514750 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-64b5b8c565-xr2cz" Dec 13 04:03:05 crc kubenswrapper[5070]: I1213 04:03:05.538196 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-784865f99f-5r8dj" event={"ID":"904c7826-4376-4e6d-a520-a12e30a3495d","Type":"ContainerDied","Data":"49ae469fb515ab561a97ee59ece123c834629a033dc446309bc22f85e049ee1e"} Dec 13 04:03:05 crc kubenswrapper[5070]: I1213 04:03:05.538275 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-784865f99f-5r8dj" Dec 13 04:03:05 crc kubenswrapper[5070]: I1213 04:03:05.584881 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-64b5b8c565-xr2cz"] Dec 13 04:03:05 crc kubenswrapper[5070]: I1213 04:03:05.612368 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-64b5b8c565-xr2cz"] Dec 13 04:03:05 crc kubenswrapper[5070]: I1213 04:03:05.656642 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-784865f99f-5r8dj"] Dec 13 04:03:05 crc kubenswrapper[5070]: I1213 04:03:05.668732 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-784865f99f-5r8dj"] Dec 13 04:03:05 crc kubenswrapper[5070]: I1213 04:03:05.767479 5070 scope.go:117] "RemoveContainer" containerID="e518416bd5d27587ddd8e5b118874c57f96850abc00f015a7407c6cc22990f6c" Dec 13 04:03:05 crc kubenswrapper[5070]: I1213 04:03:05.786012 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-8655d596d8-lz82d" Dec 13 04:03:05 crc kubenswrapper[5070]: I1213 04:03:05.869958 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-677fbcc968-jlxpr" Dec 13 04:03:06 crc kubenswrapper[5070]: I1213 04:03:06.179110 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="904c7826-4376-4e6d-a520-a12e30a3495d" path="/var/lib/kubelet/pods/904c7826-4376-4e6d-a520-a12e30a3495d/volumes" Dec 13 04:03:06 crc kubenswrapper[5070]: I1213 04:03:06.179774 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6951934-d07e-4e5a-930c-779502792de9" path="/var/lib/kubelet/pods/b6951934-d07e-4e5a-930c-779502792de9/volumes" Dec 13 04:03:06 crc kubenswrapper[5070]: I1213 04:03:06.548140 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/manila-api-0" podUID="f0d9570f-77f6-4704-9de6-170c794ebe66" containerName="manila-api" containerID="cri-o://b16592ed3268d4ced25e862ee2b84e187d373c559daf7d7c3ba107c79330b8a0" gracePeriod=30 Dec 13 04:03:06 crc kubenswrapper[5070]: I1213 04:03:06.548140 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/manila-api-0" podUID="f0d9570f-77f6-4704-9de6-170c794ebe66" containerName="manila-api-log" containerID="cri-o://4eaec48b8f58af63835e697261e9729140d85cb757ebec84f31e233e59542860" gracePeriod=30 Dec 13 04:03:07 crc kubenswrapper[5070]: I1213 04:03:07.573602 5070 generic.go:334] "Generic (PLEG): container finished" podID="f0d9570f-77f6-4704-9de6-170c794ebe66" containerID="4eaec48b8f58af63835e697261e9729140d85cb757ebec84f31e233e59542860" exitCode=143 Dec 13 04:03:07 crc kubenswrapper[5070]: I1213 04:03:07.573803 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"f0d9570f-77f6-4704-9de6-170c794ebe66","Type":"ContainerDied","Data":"4eaec48b8f58af63835e697261e9729140d85cb757ebec84f31e233e59542860"} Dec 13 04:03:07 crc kubenswrapper[5070]: I1213 04:03:07.875078 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-8655d596d8-lz82d" Dec 13 04:03:07 crc kubenswrapper[5070]: I1213 04:03:07.893034 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-677fbcc968-jlxpr" Dec 13 04:03:07 crc kubenswrapper[5070]: I1213 04:03:07.997808 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-677fbcc968-jlxpr"] Dec 13 04:03:08 crc kubenswrapper[5070]: I1213 
04:03:08.582575 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-677fbcc968-jlxpr" podUID="0f20c4db-2943-474f-970d-02d52f185c6e" containerName="horizon-log" containerID="cri-o://d5fcca5b12c472f0d65862cba81af0ae790a9449ba5fe95a7e1a1ac1aa1750f2" gracePeriod=30 Dec 13 04:03:08 crc kubenswrapper[5070]: I1213 04:03:08.583295 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-677fbcc968-jlxpr" podUID="0f20c4db-2943-474f-970d-02d52f185c6e" containerName="horizon" containerID="cri-o://3b5170299cf86540bf9ffcd68a1aefacc52df624faa0a24827107039f9b865db" gracePeriod=30 Dec 13 04:03:09 crc kubenswrapper[5070]: I1213 04:03:09.592741 5070 generic.go:334] "Generic (PLEG): container finished" podID="f0d9570f-77f6-4704-9de6-170c794ebe66" containerID="b16592ed3268d4ced25e862ee2b84e187d373c559daf7d7c3ba107c79330b8a0" exitCode=0 Dec 13 04:03:09 crc kubenswrapper[5070]: I1213 04:03:09.592833 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"f0d9570f-77f6-4704-9de6-170c794ebe66","Type":"ContainerDied","Data":"b16592ed3268d4ced25e862ee2b84e187d373c559daf7d7c3ba107c79330b8a0"} Dec 13 04:03:10 crc kubenswrapper[5070]: I1213 04:03:10.668054 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/manila-scheduler-0" Dec 13 04:03:10 crc kubenswrapper[5070]: I1213 04:03:10.984743 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-69655fd4bf-4hws8" Dec 13 04:03:11 crc kubenswrapper[5070]: I1213 04:03:11.048204 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-fbc59fbb7-6vl2v"] Dec 13 04:03:11 crc kubenswrapper[5070]: I1213 04:03:11.048529 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-fbc59fbb7-6vl2v" podUID="ffe09009-9b3f-4256-b12a-a4ee47c59d45" containerName="dnsmasq-dns" containerID="cri-o://af437229b7604d90d849225a5a7cd682b4b13b89ae88322aaca5288dc4ab6fd0" gracePeriod=10 Dec 13 04:03:11 crc kubenswrapper[5070]: I1213 04:03:11.101345 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-api-0" Dec 13 04:03:11 crc kubenswrapper[5070]: I1213 04:03:11.228761 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f0d9570f-77f6-4704-9de6-170c794ebe66-config-data-custom\") pod \"f0d9570f-77f6-4704-9de6-170c794ebe66\" (UID: \"f0d9570f-77f6-4704-9de6-170c794ebe66\") " Dec 13 04:03:11 crc kubenswrapper[5070]: I1213 04:03:11.230002 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c7cqk\" (UniqueName: \"kubernetes.io/projected/f0d9570f-77f6-4704-9de6-170c794ebe66-kube-api-access-c7cqk\") pod \"f0d9570f-77f6-4704-9de6-170c794ebe66\" (UID: \"f0d9570f-77f6-4704-9de6-170c794ebe66\") " Dec 13 04:03:11 crc kubenswrapper[5070]: I1213 04:03:11.230045 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f0d9570f-77f6-4704-9de6-170c794ebe66-config-data\") pod \"f0d9570f-77f6-4704-9de6-170c794ebe66\" (UID: \"f0d9570f-77f6-4704-9de6-170c794ebe66\") " Dec 13 04:03:11 crc kubenswrapper[5070]: I1213 04:03:11.230136 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f0d9570f-77f6-4704-9de6-170c794ebe66-etc-machine-id\") pod \"f0d9570f-77f6-4704-9de6-170c794ebe66\" (UID: \"f0d9570f-77f6-4704-9de6-170c794ebe66\") " Dec 13 04:03:11 crc kubenswrapper[5070]: I1213 04:03:11.230160 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f0d9570f-77f6-4704-9de6-170c794ebe66-combined-ca-bundle\") pod \"f0d9570f-77f6-4704-9de6-170c794ebe66\" (UID: \"f0d9570f-77f6-4704-9de6-170c794ebe66\") " Dec 13 04:03:11 crc kubenswrapper[5070]: I1213 04:03:11.230190 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f0d9570f-77f6-4704-9de6-170c794ebe66-logs\") pod \"f0d9570f-77f6-4704-9de6-170c794ebe66\" (UID: \"f0d9570f-77f6-4704-9de6-170c794ebe66\") " Dec 13 04:03:11 crc kubenswrapper[5070]: I1213 04:03:11.230207 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f0d9570f-77f6-4704-9de6-170c794ebe66-scripts\") pod \"f0d9570f-77f6-4704-9de6-170c794ebe66\" (UID: \"f0d9570f-77f6-4704-9de6-170c794ebe66\") " Dec 13 04:03:11 crc kubenswrapper[5070]: I1213 04:03:11.231580 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f0d9570f-77f6-4704-9de6-170c794ebe66-logs" (OuterVolumeSpecName: "logs") pod "f0d9570f-77f6-4704-9de6-170c794ebe66" (UID: "f0d9570f-77f6-4704-9de6-170c794ebe66"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 04:03:11 crc kubenswrapper[5070]: I1213 04:03:11.231644 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f0d9570f-77f6-4704-9de6-170c794ebe66-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "f0d9570f-77f6-4704-9de6-170c794ebe66" (UID: "f0d9570f-77f6-4704-9de6-170c794ebe66"). InnerVolumeSpecName "etc-machine-id". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 13 04:03:11 crc kubenswrapper[5070]: I1213 04:03:11.237583 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f0d9570f-77f6-4704-9de6-170c794ebe66-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "f0d9570f-77f6-4704-9de6-170c794ebe66" (UID: "f0d9570f-77f6-4704-9de6-170c794ebe66"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 04:03:11 crc kubenswrapper[5070]: I1213 04:03:11.237617 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f0d9570f-77f6-4704-9de6-170c794ebe66-scripts" (OuterVolumeSpecName: "scripts") pod "f0d9570f-77f6-4704-9de6-170c794ebe66" (UID: "f0d9570f-77f6-4704-9de6-170c794ebe66"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 04:03:11 crc kubenswrapper[5070]: I1213 04:03:11.258610 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f0d9570f-77f6-4704-9de6-170c794ebe66-kube-api-access-c7cqk" (OuterVolumeSpecName: "kube-api-access-c7cqk") pod "f0d9570f-77f6-4704-9de6-170c794ebe66" (UID: "f0d9570f-77f6-4704-9de6-170c794ebe66"). InnerVolumeSpecName "kube-api-access-c7cqk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 04:03:11 crc kubenswrapper[5070]: I1213 04:03:11.286200 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f0d9570f-77f6-4704-9de6-170c794ebe66-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f0d9570f-77f6-4704-9de6-170c794ebe66" (UID: "f0d9570f-77f6-4704-9de6-170c794ebe66"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 04:03:11 crc kubenswrapper[5070]: I1213 04:03:11.330016 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f0d9570f-77f6-4704-9de6-170c794ebe66-config-data" (OuterVolumeSpecName: "config-data") pod "f0d9570f-77f6-4704-9de6-170c794ebe66" (UID: "f0d9570f-77f6-4704-9de6-170c794ebe66"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 04:03:11 crc kubenswrapper[5070]: I1213 04:03:11.331619 5070 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f0d9570f-77f6-4704-9de6-170c794ebe66-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 13 04:03:11 crc kubenswrapper[5070]: I1213 04:03:11.331640 5070 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f0d9570f-77f6-4704-9de6-170c794ebe66-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 04:03:11 crc kubenswrapper[5070]: I1213 04:03:11.331652 5070 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f0d9570f-77f6-4704-9de6-170c794ebe66-logs\") on node \"crc\" DevicePath \"\"" Dec 13 04:03:11 crc kubenswrapper[5070]: I1213 04:03:11.331661 5070 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f0d9570f-77f6-4704-9de6-170c794ebe66-scripts\") on node \"crc\" DevicePath \"\"" Dec 13 04:03:11 crc kubenswrapper[5070]: I1213 04:03:11.331668 5070 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f0d9570f-77f6-4704-9de6-170c794ebe66-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 13 04:03:11 crc kubenswrapper[5070]: I1213 04:03:11.331679 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c7cqk\" (UniqueName: \"kubernetes.io/projected/f0d9570f-77f6-4704-9de6-170c794ebe66-kube-api-access-c7cqk\") on node \"crc\" DevicePath \"\"" Dec 13 04:03:11 crc kubenswrapper[5070]: I1213 04:03:11.331690 5070 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f0d9570f-77f6-4704-9de6-170c794ebe66-config-data\") on node \"crc\" DevicePath \"\"" Dec 13 04:03:11 crc kubenswrapper[5070]: I1213 04:03:11.390291 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 13 04:03:11 crc kubenswrapper[5070]: I1213 04:03:11.391597 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="ae7d2789-ba9f-497c-a444-a6a973ae174d" containerName="sg-core" containerID="cri-o://22cf25b51e02a9782bce2e80e4af85b93bfc55609ae89cbc2d9d3ef418b8908a" gracePeriod=30 Dec 13 04:03:11 crc kubenswrapper[5070]: I1213 04:03:11.391621 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="ae7d2789-ba9f-497c-a444-a6a973ae174d" containerName="proxy-httpd" containerID="cri-o://4bf21c7eaa6cb8982af7d872506ff0e69122e8ab58dbaccb40a8dc4f683b05f3" gracePeriod=30 Dec 13 04:03:11 crc kubenswrapper[5070]: I1213 04:03:11.391752 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="ae7d2789-ba9f-497c-a444-a6a973ae174d" containerName="ceilometer-notification-agent" containerID="cri-o://94082177bcb0ed710678b98bb5f42bdd8ed78ba2c8769ec5466eee631cb71cfb" gracePeriod=30 Dec 13 04:03:11 crc kubenswrapper[5070]: I1213 04:03:11.391796 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="ae7d2789-ba9f-497c-a444-a6a973ae174d" containerName="ceilometer-central-agent" containerID="cri-o://158d54d24f96255ea9ac0e5e391f70e01b4abd627af90260f0f30443b4dac8dd" gracePeriod=30 Dec 13 04:03:11 crc kubenswrapper[5070]: I1213 04:03:11.616532 5070 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"f0d9570f-77f6-4704-9de6-170c794ebe66","Type":"ContainerDied","Data":"80e20c904f72d6e7cb8ac054f0594891a02226d53efdebc9f0aac1883b5852f3"} Dec 13 04:03:11 crc kubenswrapper[5070]: I1213 04:03:11.616581 5070 scope.go:117] "RemoveContainer" containerID="b16592ed3268d4ced25e862ee2b84e187d373c559daf7d7c3ba107c79330b8a0" Dec 13 04:03:11 crc kubenswrapper[5070]: I1213 04:03:11.616708 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-api-0" Dec 13 04:03:11 crc kubenswrapper[5070]: I1213 04:03:11.619350 5070 generic.go:334] "Generic (PLEG): container finished" podID="ffe09009-9b3f-4256-b12a-a4ee47c59d45" containerID="af437229b7604d90d849225a5a7cd682b4b13b89ae88322aaca5288dc4ab6fd0" exitCode=0 Dec 13 04:03:11 crc kubenswrapper[5070]: I1213 04:03:11.619401 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-fbc59fbb7-6vl2v" event={"ID":"ffe09009-9b3f-4256-b12a-a4ee47c59d45","Type":"ContainerDied","Data":"af437229b7604d90d849225a5a7cd682b4b13b89ae88322aaca5288dc4ab6fd0"} Dec 13 04:03:11 crc kubenswrapper[5070]: I1213 04:03:11.623891 5070 generic.go:334] "Generic (PLEG): container finished" podID="ae7d2789-ba9f-497c-a444-a6a973ae174d" containerID="4bf21c7eaa6cb8982af7d872506ff0e69122e8ab58dbaccb40a8dc4f683b05f3" exitCode=0 Dec 13 04:03:11 crc kubenswrapper[5070]: I1213 04:03:11.623929 5070 generic.go:334] "Generic (PLEG): container finished" podID="ae7d2789-ba9f-497c-a444-a6a973ae174d" containerID="22cf25b51e02a9782bce2e80e4af85b93bfc55609ae89cbc2d9d3ef418b8908a" exitCode=2 Dec 13 04:03:11 crc kubenswrapper[5070]: I1213 04:03:11.624149 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ae7d2789-ba9f-497c-a444-a6a973ae174d","Type":"ContainerDied","Data":"4bf21c7eaa6cb8982af7d872506ff0e69122e8ab58dbaccb40a8dc4f683b05f3"} Dec 13 04:03:11 crc kubenswrapper[5070]: I1213 04:03:11.624179 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ae7d2789-ba9f-497c-a444-a6a973ae174d","Type":"ContainerDied","Data":"22cf25b51e02a9782bce2e80e4af85b93bfc55609ae89cbc2d9d3ef418b8908a"} Dec 13 04:03:11 crc kubenswrapper[5070]: I1213 04:03:11.676995 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-api-0"] Dec 13 04:03:11 crc kubenswrapper[5070]: I1213 04:03:11.693529 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/manila-api-0"] Dec 13 04:03:11 crc kubenswrapper[5070]: I1213 04:03:11.698739 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-api-0"] Dec 13 04:03:11 crc kubenswrapper[5070]: E1213 04:03:11.699117 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b6951934-d07e-4e5a-930c-779502792de9" containerName="horizon" Dec 13 04:03:11 crc kubenswrapper[5070]: I1213 04:03:11.699130 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="b6951934-d07e-4e5a-930c-779502792de9" containerName="horizon" Dec 13 04:03:11 crc kubenswrapper[5070]: E1213 04:03:11.699144 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f0d9570f-77f6-4704-9de6-170c794ebe66" containerName="manila-api" Dec 13 04:03:11 crc kubenswrapper[5070]: I1213 04:03:11.699151 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="f0d9570f-77f6-4704-9de6-170c794ebe66" containerName="manila-api" Dec 13 04:03:11 crc kubenswrapper[5070]: E1213 04:03:11.699168 5070 cpu_manager.go:410] "RemoveStaleState: removing 
container" podUID="904c7826-4376-4e6d-a520-a12e30a3495d" containerName="horizon-log" Dec 13 04:03:11 crc kubenswrapper[5070]: I1213 04:03:11.699176 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="904c7826-4376-4e6d-a520-a12e30a3495d" containerName="horizon-log" Dec 13 04:03:11 crc kubenswrapper[5070]: E1213 04:03:11.699191 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f0d9570f-77f6-4704-9de6-170c794ebe66" containerName="manila-api-log" Dec 13 04:03:11 crc kubenswrapper[5070]: I1213 04:03:11.699197 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="f0d9570f-77f6-4704-9de6-170c794ebe66" containerName="manila-api-log" Dec 13 04:03:11 crc kubenswrapper[5070]: E1213 04:03:11.699210 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="904c7826-4376-4e6d-a520-a12e30a3495d" containerName="horizon" Dec 13 04:03:11 crc kubenswrapper[5070]: I1213 04:03:11.699217 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="904c7826-4376-4e6d-a520-a12e30a3495d" containerName="horizon" Dec 13 04:03:11 crc kubenswrapper[5070]: E1213 04:03:11.699230 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b6951934-d07e-4e5a-930c-779502792de9" containerName="horizon-log" Dec 13 04:03:11 crc kubenswrapper[5070]: I1213 04:03:11.699240 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="b6951934-d07e-4e5a-930c-779502792de9" containerName="horizon-log" Dec 13 04:03:11 crc kubenswrapper[5070]: I1213 04:03:11.699444 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="904c7826-4376-4e6d-a520-a12e30a3495d" containerName="horizon-log" Dec 13 04:03:11 crc kubenswrapper[5070]: I1213 04:03:11.699473 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="b6951934-d07e-4e5a-930c-779502792de9" containerName="horizon-log" Dec 13 04:03:11 crc kubenswrapper[5070]: I1213 04:03:11.699495 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="904c7826-4376-4e6d-a520-a12e30a3495d" containerName="horizon" Dec 13 04:03:11 crc kubenswrapper[5070]: I1213 04:03:11.699506 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="f0d9570f-77f6-4704-9de6-170c794ebe66" containerName="manila-api-log" Dec 13 04:03:11 crc kubenswrapper[5070]: I1213 04:03:11.699518 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="f0d9570f-77f6-4704-9de6-170c794ebe66" containerName="manila-api" Dec 13 04:03:11 crc kubenswrapper[5070]: I1213 04:03:11.699528 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="b6951934-d07e-4e5a-930c-779502792de9" containerName="horizon" Dec 13 04:03:11 crc kubenswrapper[5070]: I1213 04:03:11.700571 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-api-0" Dec 13 04:03:11 crc kubenswrapper[5070]: I1213 04:03:11.702992 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-api-config-data" Dec 13 04:03:11 crc kubenswrapper[5070]: I1213 04:03:11.703213 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-manila-internal-svc" Dec 13 04:03:11 crc kubenswrapper[5070]: I1213 04:03:11.704358 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-manila-public-svc" Dec 13 04:03:11 crc kubenswrapper[5070]: I1213 04:03:11.719305 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-api-0"] Dec 13 04:03:11 crc kubenswrapper[5070]: I1213 04:03:11.840937 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1e8258a6-caa4-4149-b318-2f985cb3ccdc-public-tls-certs\") pod \"manila-api-0\" (UID: \"1e8258a6-caa4-4149-b318-2f985cb3ccdc\") " pod="openstack/manila-api-0" Dec 13 04:03:11 crc kubenswrapper[5070]: I1213 04:03:11.841374 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1e8258a6-caa4-4149-b318-2f985cb3ccdc-config-data\") pod \"manila-api-0\" (UID: \"1e8258a6-caa4-4149-b318-2f985cb3ccdc\") " pod="openstack/manila-api-0" Dec 13 04:03:11 crc kubenswrapper[5070]: I1213 04:03:11.841404 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1e8258a6-caa4-4149-b318-2f985cb3ccdc-config-data-custom\") pod \"manila-api-0\" (UID: \"1e8258a6-caa4-4149-b318-2f985cb3ccdc\") " pod="openstack/manila-api-0" Dec 13 04:03:11 crc kubenswrapper[5070]: I1213 04:03:11.841463 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/1e8258a6-caa4-4149-b318-2f985cb3ccdc-etc-machine-id\") pod \"manila-api-0\" (UID: \"1e8258a6-caa4-4149-b318-2f985cb3ccdc\") " pod="openstack/manila-api-0" Dec 13 04:03:11 crc kubenswrapper[5070]: I1213 04:03:11.841554 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1e8258a6-caa4-4149-b318-2f985cb3ccdc-scripts\") pod \"manila-api-0\" (UID: \"1e8258a6-caa4-4149-b318-2f985cb3ccdc\") " pod="openstack/manila-api-0" Dec 13 04:03:11 crc kubenswrapper[5070]: I1213 04:03:11.841575 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1e8258a6-caa4-4149-b318-2f985cb3ccdc-logs\") pod \"manila-api-0\" (UID: \"1e8258a6-caa4-4149-b318-2f985cb3ccdc\") " pod="openstack/manila-api-0" Dec 13 04:03:11 crc kubenswrapper[5070]: I1213 04:03:11.841632 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1e8258a6-caa4-4149-b318-2f985cb3ccdc-combined-ca-bundle\") pod \"manila-api-0\" (UID: \"1e8258a6-caa4-4149-b318-2f985cb3ccdc\") " pod="openstack/manila-api-0" Dec 13 04:03:11 crc kubenswrapper[5070]: I1213 04:03:11.841673 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/1e8258a6-caa4-4149-b318-2f985cb3ccdc-internal-tls-certs\") pod \"manila-api-0\" (UID: \"1e8258a6-caa4-4149-b318-2f985cb3ccdc\") " pod="openstack/manila-api-0" Dec 13 04:03:11 crc kubenswrapper[5070]: I1213 04:03:11.841715 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kvdp4\" (UniqueName: \"kubernetes.io/projected/1e8258a6-caa4-4149-b318-2f985cb3ccdc-kube-api-access-kvdp4\") pod \"manila-api-0\" (UID: \"1e8258a6-caa4-4149-b318-2f985cb3ccdc\") " pod="openstack/manila-api-0" Dec 13 04:03:11 crc kubenswrapper[5070]: I1213 04:03:11.942605 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1e8258a6-caa4-4149-b318-2f985cb3ccdc-logs\") pod \"manila-api-0\" (UID: \"1e8258a6-caa4-4149-b318-2f985cb3ccdc\") " pod="openstack/manila-api-0" Dec 13 04:03:11 crc kubenswrapper[5070]: I1213 04:03:11.942659 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1e8258a6-caa4-4149-b318-2f985cb3ccdc-combined-ca-bundle\") pod \"manila-api-0\" (UID: \"1e8258a6-caa4-4149-b318-2f985cb3ccdc\") " pod="openstack/manila-api-0" Dec 13 04:03:11 crc kubenswrapper[5070]: I1213 04:03:11.942694 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1e8258a6-caa4-4149-b318-2f985cb3ccdc-internal-tls-certs\") pod \"manila-api-0\" (UID: \"1e8258a6-caa4-4149-b318-2f985cb3ccdc\") " pod="openstack/manila-api-0" Dec 13 04:03:11 crc kubenswrapper[5070]: I1213 04:03:11.942720 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kvdp4\" (UniqueName: \"kubernetes.io/projected/1e8258a6-caa4-4149-b318-2f985cb3ccdc-kube-api-access-kvdp4\") pod \"manila-api-0\" (UID: \"1e8258a6-caa4-4149-b318-2f985cb3ccdc\") " pod="openstack/manila-api-0" Dec 13 04:03:11 crc kubenswrapper[5070]: I1213 04:03:11.942827 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1e8258a6-caa4-4149-b318-2f985cb3ccdc-public-tls-certs\") pod \"manila-api-0\" (UID: \"1e8258a6-caa4-4149-b318-2f985cb3ccdc\") " pod="openstack/manila-api-0" Dec 13 04:03:11 crc kubenswrapper[5070]: I1213 04:03:11.942842 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1e8258a6-caa4-4149-b318-2f985cb3ccdc-config-data\") pod \"manila-api-0\" (UID: \"1e8258a6-caa4-4149-b318-2f985cb3ccdc\") " pod="openstack/manila-api-0" Dec 13 04:03:11 crc kubenswrapper[5070]: I1213 04:03:11.942860 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1e8258a6-caa4-4149-b318-2f985cb3ccdc-config-data-custom\") pod \"manila-api-0\" (UID: \"1e8258a6-caa4-4149-b318-2f985cb3ccdc\") " pod="openstack/manila-api-0" Dec 13 04:03:11 crc kubenswrapper[5070]: I1213 04:03:11.942878 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/1e8258a6-caa4-4149-b318-2f985cb3ccdc-etc-machine-id\") pod \"manila-api-0\" (UID: \"1e8258a6-caa4-4149-b318-2f985cb3ccdc\") " pod="openstack/manila-api-0" Dec 13 04:03:11 crc kubenswrapper[5070]: I1213 04:03:11.942909 5070 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1e8258a6-caa4-4149-b318-2f985cb3ccdc-scripts\") pod \"manila-api-0\" (UID: \"1e8258a6-caa4-4149-b318-2f985cb3ccdc\") " pod="openstack/manila-api-0" Dec 13 04:03:11 crc kubenswrapper[5070]: I1213 04:03:11.943613 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1e8258a6-caa4-4149-b318-2f985cb3ccdc-logs\") pod \"manila-api-0\" (UID: \"1e8258a6-caa4-4149-b318-2f985cb3ccdc\") " pod="openstack/manila-api-0" Dec 13 04:03:11 crc kubenswrapper[5070]: I1213 04:03:11.960379 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/1e8258a6-caa4-4149-b318-2f985cb3ccdc-etc-machine-id\") pod \"manila-api-0\" (UID: \"1e8258a6-caa4-4149-b318-2f985cb3ccdc\") " pod="openstack/manila-api-0" Dec 13 04:03:11 crc kubenswrapper[5070]: I1213 04:03:11.967560 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1e8258a6-caa4-4149-b318-2f985cb3ccdc-internal-tls-certs\") pod \"manila-api-0\" (UID: \"1e8258a6-caa4-4149-b318-2f985cb3ccdc\") " pod="openstack/manila-api-0" Dec 13 04:03:11 crc kubenswrapper[5070]: I1213 04:03:11.987298 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1e8258a6-caa4-4149-b318-2f985cb3ccdc-config-data\") pod \"manila-api-0\" (UID: \"1e8258a6-caa4-4149-b318-2f985cb3ccdc\") " pod="openstack/manila-api-0" Dec 13 04:03:11 crc kubenswrapper[5070]: I1213 04:03:11.987926 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1e8258a6-caa4-4149-b318-2f985cb3ccdc-combined-ca-bundle\") pod \"manila-api-0\" (UID: \"1e8258a6-caa4-4149-b318-2f985cb3ccdc\") " pod="openstack/manila-api-0" Dec 13 04:03:11 crc kubenswrapper[5070]: I1213 04:03:11.995083 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1e8258a6-caa4-4149-b318-2f985cb3ccdc-public-tls-certs\") pod \"manila-api-0\" (UID: \"1e8258a6-caa4-4149-b318-2f985cb3ccdc\") " pod="openstack/manila-api-0" Dec 13 04:03:12 crc kubenswrapper[5070]: I1213 04:03:12.000041 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1e8258a6-caa4-4149-b318-2f985cb3ccdc-scripts\") pod \"manila-api-0\" (UID: \"1e8258a6-caa4-4149-b318-2f985cb3ccdc\") " pod="openstack/manila-api-0" Dec 13 04:03:12 crc kubenswrapper[5070]: I1213 04:03:12.000307 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kvdp4\" (UniqueName: \"kubernetes.io/projected/1e8258a6-caa4-4149-b318-2f985cb3ccdc-kube-api-access-kvdp4\") pod \"manila-api-0\" (UID: \"1e8258a6-caa4-4149-b318-2f985cb3ccdc\") " pod="openstack/manila-api-0" Dec 13 04:03:12 crc kubenswrapper[5070]: I1213 04:03:12.000707 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1e8258a6-caa4-4149-b318-2f985cb3ccdc-config-data-custom\") pod \"manila-api-0\" (UID: \"1e8258a6-caa4-4149-b318-2f985cb3ccdc\") " pod="openstack/manila-api-0" Dec 13 04:03:12 crc kubenswrapper[5070]: I1213 04:03:12.038380 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-api-0" Dec 13 04:03:12 crc kubenswrapper[5070]: I1213 04:03:12.180754 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f0d9570f-77f6-4704-9de6-170c794ebe66" path="/var/lib/kubelet/pods/f0d9570f-77f6-4704-9de6-170c794ebe66/volumes" Dec 13 04:03:12 crc kubenswrapper[5070]: I1213 04:03:12.637923 5070 generic.go:334] "Generic (PLEG): container finished" podID="ae7d2789-ba9f-497c-a444-a6a973ae174d" containerID="158d54d24f96255ea9ac0e5e391f70e01b4abd627af90260f0f30443b4dac8dd" exitCode=0 Dec 13 04:03:12 crc kubenswrapper[5070]: I1213 04:03:12.637978 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ae7d2789-ba9f-497c-a444-a6a973ae174d","Type":"ContainerDied","Data":"158d54d24f96255ea9ac0e5e391f70e01b4abd627af90260f0f30443b4dac8dd"} Dec 13 04:03:12 crc kubenswrapper[5070]: I1213 04:03:12.639961 5070 generic.go:334] "Generic (PLEG): container finished" podID="0f20c4db-2943-474f-970d-02d52f185c6e" containerID="3b5170299cf86540bf9ffcd68a1aefacc52df624faa0a24827107039f9b865db" exitCode=0 Dec 13 04:03:12 crc kubenswrapper[5070]: I1213 04:03:12.639984 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-677fbcc968-jlxpr" event={"ID":"0f20c4db-2943-474f-970d-02d52f185c6e","Type":"ContainerDied","Data":"3b5170299cf86540bf9ffcd68a1aefacc52df624faa0a24827107039f9b865db"} Dec 13 04:03:12 crc kubenswrapper[5070]: I1213 04:03:12.793202 5070 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-677fbcc968-jlxpr" podUID="0f20c4db-2943-474f-970d-02d52f185c6e" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.238:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.238:8443: connect: connection refused" Dec 13 04:03:14 crc kubenswrapper[5070]: I1213 04:03:14.660361 5070 generic.go:334] "Generic (PLEG): container finished" podID="ae7d2789-ba9f-497c-a444-a6a973ae174d" containerID="94082177bcb0ed710678b98bb5f42bdd8ed78ba2c8769ec5466eee631cb71cfb" exitCode=0 Dec 13 04:03:14 crc kubenswrapper[5070]: I1213 04:03:14.660428 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ae7d2789-ba9f-497c-a444-a6a973ae174d","Type":"ContainerDied","Data":"94082177bcb0ed710678b98bb5f42bdd8ed78ba2c8769ec5466eee631cb71cfb"} Dec 13 04:03:14 crc kubenswrapper[5070]: I1213 04:03:14.760333 5070 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-fbc59fbb7-6vl2v" podUID="ffe09009-9b3f-4256-b12a-a4ee47c59d45" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.193:5353: connect: connection refused" Dec 13 04:03:14 crc kubenswrapper[5070]: I1213 04:03:14.963443 5070 scope.go:117] "RemoveContainer" containerID="4eaec48b8f58af63835e697261e9729140d85cb757ebec84f31e233e59542860" Dec 13 04:03:15 crc kubenswrapper[5070]: I1213 04:03:15.765978 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-api-0"] Dec 13 04:03:17 crc kubenswrapper[5070]: W1213 04:03:17.156018 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1e8258a6_caa4_4149_b318_2f985cb3ccdc.slice/crio-2d134256bc0bd3aec097c837beedd9026e92358c2fb681581029da59dbf7f08c WatchSource:0}: Error finding container 2d134256bc0bd3aec097c837beedd9026e92358c2fb681581029da59dbf7f08c: Status 404 returned error can't find the container with id 
2d134256bc0bd3aec097c837beedd9026e92358c2fb681581029da59dbf7f08c Dec 13 04:03:17 crc kubenswrapper[5070]: I1213 04:03:17.379887 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-fbc59fbb7-6vl2v" Dec 13 04:03:17 crc kubenswrapper[5070]: I1213 04:03:17.386652 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 13 04:03:17 crc kubenswrapper[5070]: I1213 04:03:17.509831 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ae7d2789-ba9f-497c-a444-a6a973ae174d-scripts\") pod \"ae7d2789-ba9f-497c-a444-a6a973ae174d\" (UID: \"ae7d2789-ba9f-497c-a444-a6a973ae174d\") " Dec 13 04:03:17 crc kubenswrapper[5070]: I1213 04:03:17.509910 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ae7d2789-ba9f-497c-a444-a6a973ae174d-config-data\") pod \"ae7d2789-ba9f-497c-a444-a6a973ae174d\" (UID: \"ae7d2789-ba9f-497c-a444-a6a973ae174d\") " Dec 13 04:03:17 crc kubenswrapper[5070]: I1213 04:03:17.509940 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/ffe09009-9b3f-4256-b12a-a4ee47c59d45-openstack-edpm-ipam\") pod \"ffe09009-9b3f-4256-b12a-a4ee47c59d45\" (UID: \"ffe09009-9b3f-4256-b12a-a4ee47c59d45\") " Dec 13 04:03:17 crc kubenswrapper[5070]: I1213 04:03:17.509998 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/ae7d2789-ba9f-497c-a444-a6a973ae174d-ceilometer-tls-certs\") pod \"ae7d2789-ba9f-497c-a444-a6a973ae174d\" (UID: \"ae7d2789-ba9f-497c-a444-a6a973ae174d\") " Dec 13 04:03:17 crc kubenswrapper[5070]: I1213 04:03:17.510070 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ffe09009-9b3f-4256-b12a-a4ee47c59d45-config\") pod \"ffe09009-9b3f-4256-b12a-a4ee47c59d45\" (UID: \"ffe09009-9b3f-4256-b12a-a4ee47c59d45\") " Dec 13 04:03:17 crc kubenswrapper[5070]: I1213 04:03:17.510107 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ffe09009-9b3f-4256-b12a-a4ee47c59d45-ovsdbserver-nb\") pod \"ffe09009-9b3f-4256-b12a-a4ee47c59d45\" (UID: \"ffe09009-9b3f-4256-b12a-a4ee47c59d45\") " Dec 13 04:03:17 crc kubenswrapper[5070]: I1213 04:03:17.510130 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ae7d2789-ba9f-497c-a444-a6a973ae174d-log-httpd\") pod \"ae7d2789-ba9f-497c-a444-a6a973ae174d\" (UID: \"ae7d2789-ba9f-497c-a444-a6a973ae174d\") " Dec 13 04:03:17 crc kubenswrapper[5070]: I1213 04:03:17.510243 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pxhph\" (UniqueName: \"kubernetes.io/projected/ffe09009-9b3f-4256-b12a-a4ee47c59d45-kube-api-access-pxhph\") pod \"ffe09009-9b3f-4256-b12a-a4ee47c59d45\" (UID: \"ffe09009-9b3f-4256-b12a-a4ee47c59d45\") " Dec 13 04:03:17 crc kubenswrapper[5070]: I1213 04:03:17.510272 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ffe09009-9b3f-4256-b12a-a4ee47c59d45-dns-svc\") pod \"ffe09009-9b3f-4256-b12a-a4ee47c59d45\" (UID: 
\"ffe09009-9b3f-4256-b12a-a4ee47c59d45\") " Dec 13 04:03:17 crc kubenswrapper[5070]: I1213 04:03:17.510321 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ffe09009-9b3f-4256-b12a-a4ee47c59d45-ovsdbserver-sb\") pod \"ffe09009-9b3f-4256-b12a-a4ee47c59d45\" (UID: \"ffe09009-9b3f-4256-b12a-a4ee47c59d45\") " Dec 13 04:03:17 crc kubenswrapper[5070]: I1213 04:03:17.510342 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae7d2789-ba9f-497c-a444-a6a973ae174d-combined-ca-bundle\") pod \"ae7d2789-ba9f-497c-a444-a6a973ae174d\" (UID: \"ae7d2789-ba9f-497c-a444-a6a973ae174d\") " Dec 13 04:03:17 crc kubenswrapper[5070]: I1213 04:03:17.510359 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ae7d2789-ba9f-497c-a444-a6a973ae174d-sg-core-conf-yaml\") pod \"ae7d2789-ba9f-497c-a444-a6a973ae174d\" (UID: \"ae7d2789-ba9f-497c-a444-a6a973ae174d\") " Dec 13 04:03:17 crc kubenswrapper[5070]: I1213 04:03:17.510395 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ae7d2789-ba9f-497c-a444-a6a973ae174d-run-httpd\") pod \"ae7d2789-ba9f-497c-a444-a6a973ae174d\" (UID: \"ae7d2789-ba9f-497c-a444-a6a973ae174d\") " Dec 13 04:03:17 crc kubenswrapper[5070]: I1213 04:03:17.510416 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qxqrq\" (UniqueName: \"kubernetes.io/projected/ae7d2789-ba9f-497c-a444-a6a973ae174d-kube-api-access-qxqrq\") pod \"ae7d2789-ba9f-497c-a444-a6a973ae174d\" (UID: \"ae7d2789-ba9f-497c-a444-a6a973ae174d\") " Dec 13 04:03:17 crc kubenswrapper[5070]: I1213 04:03:17.512074 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ae7d2789-ba9f-497c-a444-a6a973ae174d-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "ae7d2789-ba9f-497c-a444-a6a973ae174d" (UID: "ae7d2789-ba9f-497c-a444-a6a973ae174d"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 04:03:17 crc kubenswrapper[5070]: I1213 04:03:17.516209 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ae7d2789-ba9f-497c-a444-a6a973ae174d-scripts" (OuterVolumeSpecName: "scripts") pod "ae7d2789-ba9f-497c-a444-a6a973ae174d" (UID: "ae7d2789-ba9f-497c-a444-a6a973ae174d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 04:03:17 crc kubenswrapper[5070]: I1213 04:03:17.516876 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ae7d2789-ba9f-497c-a444-a6a973ae174d-kube-api-access-qxqrq" (OuterVolumeSpecName: "kube-api-access-qxqrq") pod "ae7d2789-ba9f-497c-a444-a6a973ae174d" (UID: "ae7d2789-ba9f-497c-a444-a6a973ae174d"). InnerVolumeSpecName "kube-api-access-qxqrq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 04:03:17 crc kubenswrapper[5070]: I1213 04:03:17.516944 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ffe09009-9b3f-4256-b12a-a4ee47c59d45-kube-api-access-pxhph" (OuterVolumeSpecName: "kube-api-access-pxhph") pod "ffe09009-9b3f-4256-b12a-a4ee47c59d45" (UID: "ffe09009-9b3f-4256-b12a-a4ee47c59d45"). InnerVolumeSpecName "kube-api-access-pxhph". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 04:03:17 crc kubenswrapper[5070]: I1213 04:03:17.517256 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ae7d2789-ba9f-497c-a444-a6a973ae174d-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "ae7d2789-ba9f-497c-a444-a6a973ae174d" (UID: "ae7d2789-ba9f-497c-a444-a6a973ae174d"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 04:03:17 crc kubenswrapper[5070]: I1213 04:03:17.589008 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ae7d2789-ba9f-497c-a444-a6a973ae174d-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "ae7d2789-ba9f-497c-a444-a6a973ae174d" (UID: "ae7d2789-ba9f-497c-a444-a6a973ae174d"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 04:03:17 crc kubenswrapper[5070]: I1213 04:03:17.606390 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ffe09009-9b3f-4256-b12a-a4ee47c59d45-openstack-edpm-ipam" (OuterVolumeSpecName: "openstack-edpm-ipam") pod "ffe09009-9b3f-4256-b12a-a4ee47c59d45" (UID: "ffe09009-9b3f-4256-b12a-a4ee47c59d45"). InnerVolumeSpecName "openstack-edpm-ipam". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 04:03:17 crc kubenswrapper[5070]: I1213 04:03:17.612563 5070 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/ffe09009-9b3f-4256-b12a-a4ee47c59d45-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Dec 13 04:03:17 crc kubenswrapper[5070]: I1213 04:03:17.612601 5070 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ae7d2789-ba9f-497c-a444-a6a973ae174d-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 13 04:03:17 crc kubenswrapper[5070]: I1213 04:03:17.612613 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pxhph\" (UniqueName: \"kubernetes.io/projected/ffe09009-9b3f-4256-b12a-a4ee47c59d45-kube-api-access-pxhph\") on node \"crc\" DevicePath \"\"" Dec 13 04:03:17 crc kubenswrapper[5070]: I1213 04:03:17.612626 5070 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ae7d2789-ba9f-497c-a444-a6a973ae174d-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 13 04:03:17 crc kubenswrapper[5070]: I1213 04:03:17.612637 5070 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ae7d2789-ba9f-497c-a444-a6a973ae174d-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 13 04:03:17 crc kubenswrapper[5070]: I1213 04:03:17.612648 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qxqrq\" (UniqueName: \"kubernetes.io/projected/ae7d2789-ba9f-497c-a444-a6a973ae174d-kube-api-access-qxqrq\") on node \"crc\" DevicePath \"\"" Dec 13 04:03:17 crc kubenswrapper[5070]: I1213 04:03:17.612661 5070 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ae7d2789-ba9f-497c-a444-a6a973ae174d-scripts\") on node \"crc\" DevicePath \"\"" Dec 13 04:03:17 crc kubenswrapper[5070]: I1213 04:03:17.616569 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ffe09009-9b3f-4256-b12a-a4ee47c59d45-config" (OuterVolumeSpecName: "config") pod "ffe09009-9b3f-4256-b12a-a4ee47c59d45" 
(UID: "ffe09009-9b3f-4256-b12a-a4ee47c59d45"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 04:03:17 crc kubenswrapper[5070]: I1213 04:03:17.643943 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ffe09009-9b3f-4256-b12a-a4ee47c59d45-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "ffe09009-9b3f-4256-b12a-a4ee47c59d45" (UID: "ffe09009-9b3f-4256-b12a-a4ee47c59d45"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 04:03:17 crc kubenswrapper[5070]: I1213 04:03:17.664416 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ae7d2789-ba9f-497c-a444-a6a973ae174d-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "ae7d2789-ba9f-497c-a444-a6a973ae174d" (UID: "ae7d2789-ba9f-497c-a444-a6a973ae174d"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 04:03:17 crc kubenswrapper[5070]: I1213 04:03:17.669123 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ffe09009-9b3f-4256-b12a-a4ee47c59d45-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "ffe09009-9b3f-4256-b12a-a4ee47c59d45" (UID: "ffe09009-9b3f-4256-b12a-a4ee47c59d45"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 04:03:17 crc kubenswrapper[5070]: I1213 04:03:17.678024 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ffe09009-9b3f-4256-b12a-a4ee47c59d45-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "ffe09009-9b3f-4256-b12a-a4ee47c59d45" (UID: "ffe09009-9b3f-4256-b12a-a4ee47c59d45"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 04:03:17 crc kubenswrapper[5070]: I1213 04:03:17.693134 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"1e8258a6-caa4-4149-b318-2f985cb3ccdc","Type":"ContainerStarted","Data":"2d134256bc0bd3aec097c837beedd9026e92358c2fb681581029da59dbf7f08c"} Dec 13 04:03:17 crc kubenswrapper[5070]: I1213 04:03:17.698494 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-fbc59fbb7-6vl2v" event={"ID":"ffe09009-9b3f-4256-b12a-a4ee47c59d45","Type":"ContainerDied","Data":"f7a3a310aa7d628623df20e8a54fcca2f75cd7fa21c518a2aa5bd9b23ef2ae15"} Dec 13 04:03:17 crc kubenswrapper[5070]: I1213 04:03:17.698541 5070 scope.go:117] "RemoveContainer" containerID="af437229b7604d90d849225a5a7cd682b4b13b89ae88322aaca5288dc4ab6fd0" Dec 13 04:03:17 crc kubenswrapper[5070]: I1213 04:03:17.698576 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-fbc59fbb7-6vl2v" Dec 13 04:03:17 crc kubenswrapper[5070]: I1213 04:03:17.704851 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ae7d2789-ba9f-497c-a444-a6a973ae174d","Type":"ContainerDied","Data":"9c04bc14d773cc2b53835361270bcb275ed128ba4e6833d2290349d1918c8ac5"} Dec 13 04:03:17 crc kubenswrapper[5070]: I1213 04:03:17.704901 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 13 04:03:17 crc kubenswrapper[5070]: I1213 04:03:17.714005 5070 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ffe09009-9b3f-4256-b12a-a4ee47c59d45-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 13 04:03:17 crc kubenswrapper[5070]: I1213 04:03:17.714033 5070 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/ae7d2789-ba9f-497c-a444-a6a973ae174d-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 13 04:03:17 crc kubenswrapper[5070]: I1213 04:03:17.714045 5070 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ffe09009-9b3f-4256-b12a-a4ee47c59d45-config\") on node \"crc\" DevicePath \"\"" Dec 13 04:03:17 crc kubenswrapper[5070]: I1213 04:03:17.714055 5070 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ffe09009-9b3f-4256-b12a-a4ee47c59d45-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 13 04:03:17 crc kubenswrapper[5070]: I1213 04:03:17.714063 5070 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ffe09009-9b3f-4256-b12a-a4ee47c59d45-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 13 04:03:17 crc kubenswrapper[5070]: I1213 04:03:17.728955 5070 scope.go:117] "RemoveContainer" containerID="c6e92bdbe59f8cc22c2604afa0651de362556fb5bfb649945a2d2706d0012863" Dec 13 04:03:17 crc kubenswrapper[5070]: I1213 04:03:17.744245 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ae7d2789-ba9f-497c-a444-a6a973ae174d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ae7d2789-ba9f-497c-a444-a6a973ae174d" (UID: "ae7d2789-ba9f-497c-a444-a6a973ae174d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 04:03:17 crc kubenswrapper[5070]: I1213 04:03:17.745442 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-fbc59fbb7-6vl2v"] Dec 13 04:03:17 crc kubenswrapper[5070]: I1213 04:03:17.746833 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ae7d2789-ba9f-497c-a444-a6a973ae174d-config-data" (OuterVolumeSpecName: "config-data") pod "ae7d2789-ba9f-497c-a444-a6a973ae174d" (UID: "ae7d2789-ba9f-497c-a444-a6a973ae174d"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 04:03:17 crc kubenswrapper[5070]: I1213 04:03:17.753598 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-fbc59fbb7-6vl2v"] Dec 13 04:03:17 crc kubenswrapper[5070]: I1213 04:03:17.780901 5070 scope.go:117] "RemoveContainer" containerID="4bf21c7eaa6cb8982af7d872506ff0e69122e8ab58dbaccb40a8dc4f683b05f3" Dec 13 04:03:17 crc kubenswrapper[5070]: I1213 04:03:17.815423 5070 scope.go:117] "RemoveContainer" containerID="22cf25b51e02a9782bce2e80e4af85b93bfc55609ae89cbc2d9d3ef418b8908a" Dec 13 04:03:17 crc kubenswrapper[5070]: I1213 04:03:17.816877 5070 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ae7d2789-ba9f-497c-a444-a6a973ae174d-config-data\") on node \"crc\" DevicePath \"\"" Dec 13 04:03:17 crc kubenswrapper[5070]: I1213 04:03:17.816958 5070 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae7d2789-ba9f-497c-a444-a6a973ae174d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 04:03:17 crc kubenswrapper[5070]: I1213 04:03:17.838628 5070 scope.go:117] "RemoveContainer" containerID="94082177bcb0ed710678b98bb5f42bdd8ed78ba2c8769ec5466eee631cb71cfb" Dec 13 04:03:17 crc kubenswrapper[5070]: I1213 04:03:17.875295 5070 scope.go:117] "RemoveContainer" containerID="158d54d24f96255ea9ac0e5e391f70e01b4abd627af90260f0f30443b4dac8dd" Dec 13 04:03:18 crc kubenswrapper[5070]: I1213 04:03:18.057665 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 13 04:03:18 crc kubenswrapper[5070]: I1213 04:03:18.064626 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 13 04:03:18 crc kubenswrapper[5070]: I1213 04:03:18.073864 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 13 04:03:18 crc kubenswrapper[5070]: E1213 04:03:18.074364 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ffe09009-9b3f-4256-b12a-a4ee47c59d45" containerName="init" Dec 13 04:03:18 crc kubenswrapper[5070]: I1213 04:03:18.074388 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="ffe09009-9b3f-4256-b12a-a4ee47c59d45" containerName="init" Dec 13 04:03:18 crc kubenswrapper[5070]: E1213 04:03:18.074402 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ae7d2789-ba9f-497c-a444-a6a973ae174d" containerName="sg-core" Dec 13 04:03:18 crc kubenswrapper[5070]: I1213 04:03:18.074413 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="ae7d2789-ba9f-497c-a444-a6a973ae174d" containerName="sg-core" Dec 13 04:03:18 crc kubenswrapper[5070]: E1213 04:03:18.074445 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ffe09009-9b3f-4256-b12a-a4ee47c59d45" containerName="dnsmasq-dns" Dec 13 04:03:18 crc kubenswrapper[5070]: I1213 04:03:18.074505 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="ffe09009-9b3f-4256-b12a-a4ee47c59d45" containerName="dnsmasq-dns" Dec 13 04:03:18 crc kubenswrapper[5070]: E1213 04:03:18.074534 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ae7d2789-ba9f-497c-a444-a6a973ae174d" containerName="proxy-httpd" Dec 13 04:03:18 crc kubenswrapper[5070]: I1213 04:03:18.074543 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="ae7d2789-ba9f-497c-a444-a6a973ae174d" containerName="proxy-httpd" Dec 13 04:03:18 crc kubenswrapper[5070]: E1213 04:03:18.074560 5070 cpu_manager.go:410] "RemoveStaleState: removing 
container" podUID="ae7d2789-ba9f-497c-a444-a6a973ae174d" containerName="ceilometer-central-agent" Dec 13 04:03:18 crc kubenswrapper[5070]: I1213 04:03:18.074568 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="ae7d2789-ba9f-497c-a444-a6a973ae174d" containerName="ceilometer-central-agent" Dec 13 04:03:18 crc kubenswrapper[5070]: E1213 04:03:18.074585 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ae7d2789-ba9f-497c-a444-a6a973ae174d" containerName="ceilometer-notification-agent" Dec 13 04:03:18 crc kubenswrapper[5070]: I1213 04:03:18.074593 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="ae7d2789-ba9f-497c-a444-a6a973ae174d" containerName="ceilometer-notification-agent" Dec 13 04:03:18 crc kubenswrapper[5070]: I1213 04:03:18.074831 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="ae7d2789-ba9f-497c-a444-a6a973ae174d" containerName="ceilometer-notification-agent" Dec 13 04:03:18 crc kubenswrapper[5070]: I1213 04:03:18.074855 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="ffe09009-9b3f-4256-b12a-a4ee47c59d45" containerName="dnsmasq-dns" Dec 13 04:03:18 crc kubenswrapper[5070]: I1213 04:03:18.074868 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="ae7d2789-ba9f-497c-a444-a6a973ae174d" containerName="proxy-httpd" Dec 13 04:03:18 crc kubenswrapper[5070]: I1213 04:03:18.074884 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="ae7d2789-ba9f-497c-a444-a6a973ae174d" containerName="ceilometer-central-agent" Dec 13 04:03:18 crc kubenswrapper[5070]: I1213 04:03:18.074896 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="ae7d2789-ba9f-497c-a444-a6a973ae174d" containerName="sg-core" Dec 13 04:03:18 crc kubenswrapper[5070]: I1213 04:03:18.077006 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 13 04:03:18 crc kubenswrapper[5070]: I1213 04:03:18.079944 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Dec 13 04:03:18 crc kubenswrapper[5070]: I1213 04:03:18.080208 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 13 04:03:18 crc kubenswrapper[5070]: I1213 04:03:18.086064 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 13 04:03:18 crc kubenswrapper[5070]: I1213 04:03:18.087702 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 13 04:03:18 crc kubenswrapper[5070]: I1213 04:03:18.212031 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ae7d2789-ba9f-497c-a444-a6a973ae174d" path="/var/lib/kubelet/pods/ae7d2789-ba9f-497c-a444-a6a973ae174d/volumes" Dec 13 04:03:18 crc kubenswrapper[5070]: I1213 04:03:18.212804 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ffe09009-9b3f-4256-b12a-a4ee47c59d45" path="/var/lib/kubelet/pods/ffe09009-9b3f-4256-b12a-a4ee47c59d45/volumes" Dec 13 04:03:18 crc kubenswrapper[5070]: I1213 04:03:18.229853 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fblzz\" (UniqueName: \"kubernetes.io/projected/ecb1773c-197e-4e3a-9c38-9106ae722cbe-kube-api-access-fblzz\") pod \"ceilometer-0\" (UID: \"ecb1773c-197e-4e3a-9c38-9106ae722cbe\") " pod="openstack/ceilometer-0" Dec 13 04:03:18 crc kubenswrapper[5070]: I1213 04:03:18.229889 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ecb1773c-197e-4e3a-9c38-9106ae722cbe-log-httpd\") pod \"ceilometer-0\" (UID: \"ecb1773c-197e-4e3a-9c38-9106ae722cbe\") " pod="openstack/ceilometer-0" Dec 13 04:03:18 crc kubenswrapper[5070]: I1213 04:03:18.229920 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ecb1773c-197e-4e3a-9c38-9106ae722cbe-run-httpd\") pod \"ceilometer-0\" (UID: \"ecb1773c-197e-4e3a-9c38-9106ae722cbe\") " pod="openstack/ceilometer-0" Dec 13 04:03:18 crc kubenswrapper[5070]: I1213 04:03:18.229974 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ecb1773c-197e-4e3a-9c38-9106ae722cbe-scripts\") pod \"ceilometer-0\" (UID: \"ecb1773c-197e-4e3a-9c38-9106ae722cbe\") " pod="openstack/ceilometer-0" Dec 13 04:03:18 crc kubenswrapper[5070]: I1213 04:03:18.230026 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ecb1773c-197e-4e3a-9c38-9106ae722cbe-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ecb1773c-197e-4e3a-9c38-9106ae722cbe\") " pod="openstack/ceilometer-0" Dec 13 04:03:18 crc kubenswrapper[5070]: I1213 04:03:18.230043 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ecb1773c-197e-4e3a-9c38-9106ae722cbe-config-data\") pod \"ceilometer-0\" (UID: \"ecb1773c-197e-4e3a-9c38-9106ae722cbe\") " pod="openstack/ceilometer-0" Dec 13 04:03:18 crc kubenswrapper[5070]: I1213 04:03:18.230067 5070 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ecb1773c-197e-4e3a-9c38-9106ae722cbe-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ecb1773c-197e-4e3a-9c38-9106ae722cbe\") " pod="openstack/ceilometer-0" Dec 13 04:03:18 crc kubenswrapper[5070]: I1213 04:03:18.230093 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/ecb1773c-197e-4e3a-9c38-9106ae722cbe-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"ecb1773c-197e-4e3a-9c38-9106ae722cbe\") " pod="openstack/ceilometer-0" Dec 13 04:03:18 crc kubenswrapper[5070]: I1213 04:03:18.331231 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ecb1773c-197e-4e3a-9c38-9106ae722cbe-config-data\") pod \"ceilometer-0\" (UID: \"ecb1773c-197e-4e3a-9c38-9106ae722cbe\") " pod="openstack/ceilometer-0" Dec 13 04:03:18 crc kubenswrapper[5070]: I1213 04:03:18.331310 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ecb1773c-197e-4e3a-9c38-9106ae722cbe-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ecb1773c-197e-4e3a-9c38-9106ae722cbe\") " pod="openstack/ceilometer-0" Dec 13 04:03:18 crc kubenswrapper[5070]: I1213 04:03:18.331373 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/ecb1773c-197e-4e3a-9c38-9106ae722cbe-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"ecb1773c-197e-4e3a-9c38-9106ae722cbe\") " pod="openstack/ceilometer-0" Dec 13 04:03:18 crc kubenswrapper[5070]: I1213 04:03:18.331506 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fblzz\" (UniqueName: \"kubernetes.io/projected/ecb1773c-197e-4e3a-9c38-9106ae722cbe-kube-api-access-fblzz\") pod \"ceilometer-0\" (UID: \"ecb1773c-197e-4e3a-9c38-9106ae722cbe\") " pod="openstack/ceilometer-0" Dec 13 04:03:18 crc kubenswrapper[5070]: I1213 04:03:18.331524 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ecb1773c-197e-4e3a-9c38-9106ae722cbe-log-httpd\") pod \"ceilometer-0\" (UID: \"ecb1773c-197e-4e3a-9c38-9106ae722cbe\") " pod="openstack/ceilometer-0" Dec 13 04:03:18 crc kubenswrapper[5070]: I1213 04:03:18.331592 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ecb1773c-197e-4e3a-9c38-9106ae722cbe-run-httpd\") pod \"ceilometer-0\" (UID: \"ecb1773c-197e-4e3a-9c38-9106ae722cbe\") " pod="openstack/ceilometer-0" Dec 13 04:03:18 crc kubenswrapper[5070]: I1213 04:03:18.331675 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ecb1773c-197e-4e3a-9c38-9106ae722cbe-scripts\") pod \"ceilometer-0\" (UID: \"ecb1773c-197e-4e3a-9c38-9106ae722cbe\") " pod="openstack/ceilometer-0" Dec 13 04:03:18 crc kubenswrapper[5070]: I1213 04:03:18.331757 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ecb1773c-197e-4e3a-9c38-9106ae722cbe-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ecb1773c-197e-4e3a-9c38-9106ae722cbe\") " pod="openstack/ceilometer-0" Dec 13 04:03:18 crc kubenswrapper[5070]: 
I1213 04:03:18.332778 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ecb1773c-197e-4e3a-9c38-9106ae722cbe-log-httpd\") pod \"ceilometer-0\" (UID: \"ecb1773c-197e-4e3a-9c38-9106ae722cbe\") " pod="openstack/ceilometer-0" Dec 13 04:03:18 crc kubenswrapper[5070]: I1213 04:03:18.333030 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ecb1773c-197e-4e3a-9c38-9106ae722cbe-run-httpd\") pod \"ceilometer-0\" (UID: \"ecb1773c-197e-4e3a-9c38-9106ae722cbe\") " pod="openstack/ceilometer-0" Dec 13 04:03:18 crc kubenswrapper[5070]: I1213 04:03:18.336313 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/ecb1773c-197e-4e3a-9c38-9106ae722cbe-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"ecb1773c-197e-4e3a-9c38-9106ae722cbe\") " pod="openstack/ceilometer-0" Dec 13 04:03:18 crc kubenswrapper[5070]: I1213 04:03:18.337412 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ecb1773c-197e-4e3a-9c38-9106ae722cbe-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ecb1773c-197e-4e3a-9c38-9106ae722cbe\") " pod="openstack/ceilometer-0" Dec 13 04:03:18 crc kubenswrapper[5070]: I1213 04:03:18.341417 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ecb1773c-197e-4e3a-9c38-9106ae722cbe-config-data\") pod \"ceilometer-0\" (UID: \"ecb1773c-197e-4e3a-9c38-9106ae722cbe\") " pod="openstack/ceilometer-0" Dec 13 04:03:18 crc kubenswrapper[5070]: I1213 04:03:18.342688 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ecb1773c-197e-4e3a-9c38-9106ae722cbe-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ecb1773c-197e-4e3a-9c38-9106ae722cbe\") " pod="openstack/ceilometer-0" Dec 13 04:03:18 crc kubenswrapper[5070]: I1213 04:03:18.345134 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ecb1773c-197e-4e3a-9c38-9106ae722cbe-scripts\") pod \"ceilometer-0\" (UID: \"ecb1773c-197e-4e3a-9c38-9106ae722cbe\") " pod="openstack/ceilometer-0" Dec 13 04:03:18 crc kubenswrapper[5070]: I1213 04:03:18.358176 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fblzz\" (UniqueName: \"kubernetes.io/projected/ecb1773c-197e-4e3a-9c38-9106ae722cbe-kube-api-access-fblzz\") pod \"ceilometer-0\" (UID: \"ecb1773c-197e-4e3a-9c38-9106ae722cbe\") " pod="openstack/ceilometer-0" Dec 13 04:03:18 crc kubenswrapper[5070]: I1213 04:03:18.412195 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 13 04:03:18 crc kubenswrapper[5070]: I1213 04:03:18.722638 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"709384c1-56bc-451b-939e-910783412f47","Type":"ContainerStarted","Data":"903321bf578d07a2bd364ff87cad0439861337300c5e1b292b0f12feb32bae74"} Dec 13 04:03:18 crc kubenswrapper[5070]: I1213 04:03:18.722947 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"709384c1-56bc-451b-939e-910783412f47","Type":"ContainerStarted","Data":"f6ca60f0eb94658e725e11f48aa06546403474a7434df704dc0dc7be0a44dcb8"} Dec 13 04:03:18 crc kubenswrapper[5070]: I1213 04:03:18.745396 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"1e8258a6-caa4-4149-b318-2f985cb3ccdc","Type":"ContainerStarted","Data":"5c06f3a76bbf1c3bc3aef124876309110f2f1667ee398fc40cf73f58ba93cec9"} Dec 13 04:03:18 crc kubenswrapper[5070]: I1213 04:03:18.745462 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"1e8258a6-caa4-4149-b318-2f985cb3ccdc","Type":"ContainerStarted","Data":"f97dc2ab3efd18490c55440701580a9920b4b31a7bf7b26dd202a49768de2d7e"} Dec 13 04:03:18 crc kubenswrapper[5070]: I1213 04:03:18.745755 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/manila-api-0" Dec 13 04:03:18 crc kubenswrapper[5070]: I1213 04:03:18.761562 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-share-share1-0" podStartSLOduration=3.089596201 podStartE2EDuration="18.761543419s" podCreationTimestamp="2025-12-13 04:03:00 +0000 UTC" firstStartedPulling="2025-12-13 04:03:01.550648429 +0000 UTC m=+3073.786491975" lastFinishedPulling="2025-12-13 04:03:17.222595647 +0000 UTC m=+3089.458439193" observedRunningTime="2025-12-13 04:03:18.749136531 +0000 UTC m=+3090.984980077" watchObservedRunningTime="2025-12-13 04:03:18.761543419 +0000 UTC m=+3090.997386965" Dec 13 04:03:18 crc kubenswrapper[5070]: I1213 04:03:18.782371 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-api-0" podStartSLOduration=7.782351317 podStartE2EDuration="7.782351317s" podCreationTimestamp="2025-12-13 04:03:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 04:03:18.778382329 +0000 UTC m=+3091.014225885" watchObservedRunningTime="2025-12-13 04:03:18.782351317 +0000 UTC m=+3091.018194853" Dec 13 04:03:18 crc kubenswrapper[5070]: I1213 04:03:18.944738 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 13 04:03:18 crc kubenswrapper[5070]: W1213 04:03:18.948298 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podecb1773c_197e_4e3a_9c38_9106ae722cbe.slice/crio-d8bd012254b20e5c8c1ddd4ccb3f899ed402c78afb76e070f80849ac11a545a2 WatchSource:0}: Error finding container d8bd012254b20e5c8c1ddd4ccb3f899ed402c78afb76e070f80849ac11a545a2: Status 404 returned error can't find the container with id d8bd012254b20e5c8c1ddd4ccb3f899ed402c78afb76e070f80849ac11a545a2 Dec 13 04:03:19 crc kubenswrapper[5070]: I1213 04:03:19.763293 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"ecb1773c-197e-4e3a-9c38-9106ae722cbe","Type":"ContainerStarted","Data":"d8bd012254b20e5c8c1ddd4ccb3f899ed402c78afb76e070f80849ac11a545a2"} Dec 13 04:03:20 crc kubenswrapper[5070]: I1213 04:03:20.774054 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ecb1773c-197e-4e3a-9c38-9106ae722cbe","Type":"ContainerStarted","Data":"294c2e30c31d4ca73af52b4968112ce5e94ae4f39584c355c18213c2560d3637"} Dec 13 04:03:20 crc kubenswrapper[5070]: I1213 04:03:20.781376 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/manila-share-share1-0" Dec 13 04:03:21 crc kubenswrapper[5070]: I1213 04:03:21.785286 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ecb1773c-197e-4e3a-9c38-9106ae722cbe","Type":"ContainerStarted","Data":"b59ca8bae229fafe00b3b74c912575e233feb7cfcfb2ab1a5adebb0d7cf58ff3"} Dec 13 04:03:21 crc kubenswrapper[5070]: I1213 04:03:21.943271 5070 patch_prober.go:28] interesting pod/machine-config-daemon-9l4rb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 13 04:03:21 crc kubenswrapper[5070]: I1213 04:03:21.943621 5070 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 13 04:03:22 crc kubenswrapper[5070]: I1213 04:03:22.317226 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/manila-scheduler-0" Dec 13 04:03:22 crc kubenswrapper[5070]: I1213 04:03:22.368777 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-scheduler-0"] Dec 13 04:03:22 crc kubenswrapper[5070]: I1213 04:03:22.793717 5070 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-677fbcc968-jlxpr" podUID="0f20c4db-2943-474f-970d-02d52f185c6e" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.238:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.238:8443: connect: connection refused" Dec 13 04:03:22 crc kubenswrapper[5070]: I1213 04:03:22.797395 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/manila-scheduler-0" podUID="626e6c11-d496-4667-a00d-b84fbf1fa0de" containerName="manila-scheduler" containerID="cri-o://58d5f36aed0a9999c2fd56640fa60ab1358bded52e6be73db27baad2f77aae5f" gracePeriod=30 Dec 13 04:03:22 crc kubenswrapper[5070]: I1213 04:03:22.797522 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ecb1773c-197e-4e3a-9c38-9106ae722cbe","Type":"ContainerStarted","Data":"6a039e93e4ca6f5592109244e1db2b659bde29d94dc360450f52dd73a8108788"} Dec 13 04:03:22 crc kubenswrapper[5070]: I1213 04:03:22.797865 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/manila-scheduler-0" podUID="626e6c11-d496-4667-a00d-b84fbf1fa0de" containerName="probe" containerID="cri-o://e455d1047ce275af0e3f76ccfa09976c6827783456f195aabc937fe1c46bfbbf" gracePeriod=30 Dec 13 04:03:23 crc kubenswrapper[5070]: I1213 04:03:23.807248 5070 generic.go:334] "Generic (PLEG): container finished" podID="626e6c11-d496-4667-a00d-b84fbf1fa0de" 
containerID="e455d1047ce275af0e3f76ccfa09976c6827783456f195aabc937fe1c46bfbbf" exitCode=0 Dec 13 04:03:23 crc kubenswrapper[5070]: I1213 04:03:23.807561 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"626e6c11-d496-4667-a00d-b84fbf1fa0de","Type":"ContainerDied","Data":"e455d1047ce275af0e3f76ccfa09976c6827783456f195aabc937fe1c46bfbbf"} Dec 13 04:03:23 crc kubenswrapper[5070]: I1213 04:03:23.809536 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ecb1773c-197e-4e3a-9c38-9106ae722cbe","Type":"ContainerStarted","Data":"0e605ac42c2174c0101a6d8744d44ecabd6d5cef26d07e8cb160cd920b4f47c5"} Dec 13 04:03:23 crc kubenswrapper[5070]: I1213 04:03:23.810859 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 13 04:03:23 crc kubenswrapper[5070]: I1213 04:03:23.836057 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.620553143 podStartE2EDuration="5.836035645s" podCreationTimestamp="2025-12-13 04:03:18 +0000 UTC" firstStartedPulling="2025-12-13 04:03:18.951718389 +0000 UTC m=+3091.187561935" lastFinishedPulling="2025-12-13 04:03:23.167200891 +0000 UTC m=+3095.403044437" observedRunningTime="2025-12-13 04:03:23.831917683 +0000 UTC m=+3096.067761229" watchObservedRunningTime="2025-12-13 04:03:23.836035645 +0000 UTC m=+3096.071879191" Dec 13 04:03:24 crc kubenswrapper[5070]: I1213 04:03:24.824198 5070 generic.go:334] "Generic (PLEG): container finished" podID="626e6c11-d496-4667-a00d-b84fbf1fa0de" containerID="58d5f36aed0a9999c2fd56640fa60ab1358bded52e6be73db27baad2f77aae5f" exitCode=0 Dec 13 04:03:24 crc kubenswrapper[5070]: I1213 04:03:24.824369 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"626e6c11-d496-4667-a00d-b84fbf1fa0de","Type":"ContainerDied","Data":"58d5f36aed0a9999c2fd56640fa60ab1358bded52e6be73db27baad2f77aae5f"} Dec 13 04:03:25 crc kubenswrapper[5070]: I1213 04:03:25.250946 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-scheduler-0" Dec 13 04:03:25 crc kubenswrapper[5070]: I1213 04:03:25.401236 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/626e6c11-d496-4667-a00d-b84fbf1fa0de-combined-ca-bundle\") pod \"626e6c11-d496-4667-a00d-b84fbf1fa0de\" (UID: \"626e6c11-d496-4667-a00d-b84fbf1fa0de\") " Dec 13 04:03:25 crc kubenswrapper[5070]: I1213 04:03:25.401308 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/626e6c11-d496-4667-a00d-b84fbf1fa0de-etc-machine-id\") pod \"626e6c11-d496-4667-a00d-b84fbf1fa0de\" (UID: \"626e6c11-d496-4667-a00d-b84fbf1fa0de\") " Dec 13 04:03:25 crc kubenswrapper[5070]: I1213 04:03:25.401335 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/626e6c11-d496-4667-a00d-b84fbf1fa0de-scripts\") pod \"626e6c11-d496-4667-a00d-b84fbf1fa0de\" (UID: \"626e6c11-d496-4667-a00d-b84fbf1fa0de\") " Dec 13 04:03:25 crc kubenswrapper[5070]: I1213 04:03:25.401437 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/626e6c11-d496-4667-a00d-b84fbf1fa0de-config-data-custom\") pod \"626e6c11-d496-4667-a00d-b84fbf1fa0de\" (UID: \"626e6c11-d496-4667-a00d-b84fbf1fa0de\") " Dec 13 04:03:25 crc kubenswrapper[5070]: I1213 04:03:25.401459 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/626e6c11-d496-4667-a00d-b84fbf1fa0de-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "626e6c11-d496-4667-a00d-b84fbf1fa0de" (UID: "626e6c11-d496-4667-a00d-b84fbf1fa0de"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 13 04:03:25 crc kubenswrapper[5070]: I1213 04:03:25.401506 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/626e6c11-d496-4667-a00d-b84fbf1fa0de-config-data\") pod \"626e6c11-d496-4667-a00d-b84fbf1fa0de\" (UID: \"626e6c11-d496-4667-a00d-b84fbf1fa0de\") " Dec 13 04:03:25 crc kubenswrapper[5070]: I1213 04:03:25.401590 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cdhvk\" (UniqueName: \"kubernetes.io/projected/626e6c11-d496-4667-a00d-b84fbf1fa0de-kube-api-access-cdhvk\") pod \"626e6c11-d496-4667-a00d-b84fbf1fa0de\" (UID: \"626e6c11-d496-4667-a00d-b84fbf1fa0de\") " Dec 13 04:03:25 crc kubenswrapper[5070]: I1213 04:03:25.401977 5070 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/626e6c11-d496-4667-a00d-b84fbf1fa0de-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 13 04:03:25 crc kubenswrapper[5070]: I1213 04:03:25.407088 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/626e6c11-d496-4667-a00d-b84fbf1fa0de-scripts" (OuterVolumeSpecName: "scripts") pod "626e6c11-d496-4667-a00d-b84fbf1fa0de" (UID: "626e6c11-d496-4667-a00d-b84fbf1fa0de"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 04:03:25 crc kubenswrapper[5070]: I1213 04:03:25.409761 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/626e6c11-d496-4667-a00d-b84fbf1fa0de-kube-api-access-cdhvk" (OuterVolumeSpecName: "kube-api-access-cdhvk") pod "626e6c11-d496-4667-a00d-b84fbf1fa0de" (UID: "626e6c11-d496-4667-a00d-b84fbf1fa0de"). InnerVolumeSpecName "kube-api-access-cdhvk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 04:03:25 crc kubenswrapper[5070]: I1213 04:03:25.413015 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/626e6c11-d496-4667-a00d-b84fbf1fa0de-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "626e6c11-d496-4667-a00d-b84fbf1fa0de" (UID: "626e6c11-d496-4667-a00d-b84fbf1fa0de"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 04:03:25 crc kubenswrapper[5070]: I1213 04:03:25.468672 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/626e6c11-d496-4667-a00d-b84fbf1fa0de-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "626e6c11-d496-4667-a00d-b84fbf1fa0de" (UID: "626e6c11-d496-4667-a00d-b84fbf1fa0de"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 04:03:25 crc kubenswrapper[5070]: I1213 04:03:25.504563 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cdhvk\" (UniqueName: \"kubernetes.io/projected/626e6c11-d496-4667-a00d-b84fbf1fa0de-kube-api-access-cdhvk\") on node \"crc\" DevicePath \"\"" Dec 13 04:03:25 crc kubenswrapper[5070]: I1213 04:03:25.504597 5070 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/626e6c11-d496-4667-a00d-b84fbf1fa0de-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 04:03:25 crc kubenswrapper[5070]: I1213 04:03:25.504606 5070 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/626e6c11-d496-4667-a00d-b84fbf1fa0de-scripts\") on node \"crc\" DevicePath \"\"" Dec 13 04:03:25 crc kubenswrapper[5070]: I1213 04:03:25.504616 5070 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/626e6c11-d496-4667-a00d-b84fbf1fa0de-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 13 04:03:25 crc kubenswrapper[5070]: I1213 04:03:25.507630 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/626e6c11-d496-4667-a00d-b84fbf1fa0de-config-data" (OuterVolumeSpecName: "config-data") pod "626e6c11-d496-4667-a00d-b84fbf1fa0de" (UID: "626e6c11-d496-4667-a00d-b84fbf1fa0de"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 04:03:25 crc kubenswrapper[5070]: I1213 04:03:25.606083 5070 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/626e6c11-d496-4667-a00d-b84fbf1fa0de-config-data\") on node \"crc\" DevicePath \"\"" Dec 13 04:03:25 crc kubenswrapper[5070]: I1213 04:03:25.835836 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-scheduler-0" Dec 13 04:03:25 crc kubenswrapper[5070]: I1213 04:03:25.835881 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"626e6c11-d496-4667-a00d-b84fbf1fa0de","Type":"ContainerDied","Data":"09c312ab4a8b3d99ad28eafc973531d44919522a4c41604e9472b9a0a32f932f"} Dec 13 04:03:25 crc kubenswrapper[5070]: I1213 04:03:25.835939 5070 scope.go:117] "RemoveContainer" containerID="e455d1047ce275af0e3f76ccfa09976c6827783456f195aabc937fe1c46bfbbf" Dec 13 04:03:25 crc kubenswrapper[5070]: I1213 04:03:25.875931 5070 scope.go:117] "RemoveContainer" containerID="58d5f36aed0a9999c2fd56640fa60ab1358bded52e6be73db27baad2f77aae5f" Dec 13 04:03:25 crc kubenswrapper[5070]: I1213 04:03:25.898910 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-scheduler-0"] Dec 13 04:03:25 crc kubenswrapper[5070]: I1213 04:03:25.961522 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/manila-scheduler-0"] Dec 13 04:03:25 crc kubenswrapper[5070]: I1213 04:03:25.971950 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-scheduler-0"] Dec 13 04:03:25 crc kubenswrapper[5070]: E1213 04:03:25.976373 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="626e6c11-d496-4667-a00d-b84fbf1fa0de" containerName="manila-scheduler" Dec 13 04:03:25 crc kubenswrapper[5070]: I1213 04:03:25.976402 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="626e6c11-d496-4667-a00d-b84fbf1fa0de" containerName="manila-scheduler" Dec 13 04:03:25 crc kubenswrapper[5070]: E1213 04:03:25.976434 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="626e6c11-d496-4667-a00d-b84fbf1fa0de" containerName="probe" Dec 13 04:03:25 crc kubenswrapper[5070]: I1213 04:03:25.976455 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="626e6c11-d496-4667-a00d-b84fbf1fa0de" containerName="probe" Dec 13 04:03:25 crc kubenswrapper[5070]: I1213 04:03:25.976626 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="626e6c11-d496-4667-a00d-b84fbf1fa0de" containerName="probe" Dec 13 04:03:25 crc kubenswrapper[5070]: I1213 04:03:25.976705 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="626e6c11-d496-4667-a00d-b84fbf1fa0de" containerName="manila-scheduler" Dec 13 04:03:25 crc kubenswrapper[5070]: I1213 04:03:25.977690 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-scheduler-0" Dec 13 04:03:25 crc kubenswrapper[5070]: I1213 04:03:25.982024 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-scheduler-config-data" Dec 13 04:03:26 crc kubenswrapper[5070]: I1213 04:03:25.999112 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-scheduler-0"] Dec 13 04:03:26 crc kubenswrapper[5070]: I1213 04:03:26.159076 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7227b03a-84e0-4c9b-bb1a-baaaeb6828a9-config-data\") pod \"manila-scheduler-0\" (UID: \"7227b03a-84e0-4c9b-bb1a-baaaeb6828a9\") " pod="openstack/manila-scheduler-0" Dec 13 04:03:26 crc kubenswrapper[5070]: I1213 04:03:26.159134 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7227b03a-84e0-4c9b-bb1a-baaaeb6828a9-combined-ca-bundle\") pod \"manila-scheduler-0\" (UID: \"7227b03a-84e0-4c9b-bb1a-baaaeb6828a9\") " pod="openstack/manila-scheduler-0" Dec 13 04:03:26 crc kubenswrapper[5070]: I1213 04:03:26.159165 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bx5nh\" (UniqueName: \"kubernetes.io/projected/7227b03a-84e0-4c9b-bb1a-baaaeb6828a9-kube-api-access-bx5nh\") pod \"manila-scheduler-0\" (UID: \"7227b03a-84e0-4c9b-bb1a-baaaeb6828a9\") " pod="openstack/manila-scheduler-0" Dec 13 04:03:26 crc kubenswrapper[5070]: I1213 04:03:26.159214 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7227b03a-84e0-4c9b-bb1a-baaaeb6828a9-scripts\") pod \"manila-scheduler-0\" (UID: \"7227b03a-84e0-4c9b-bb1a-baaaeb6828a9\") " pod="openstack/manila-scheduler-0" Dec 13 04:03:26 crc kubenswrapper[5070]: I1213 04:03:26.159285 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7227b03a-84e0-4c9b-bb1a-baaaeb6828a9-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"7227b03a-84e0-4c9b-bb1a-baaaeb6828a9\") " pod="openstack/manila-scheduler-0" Dec 13 04:03:26 crc kubenswrapper[5070]: I1213 04:03:26.159342 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7227b03a-84e0-4c9b-bb1a-baaaeb6828a9-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"7227b03a-84e0-4c9b-bb1a-baaaeb6828a9\") " pod="openstack/manila-scheduler-0" Dec 13 04:03:26 crc kubenswrapper[5070]: I1213 04:03:26.179431 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="626e6c11-d496-4667-a00d-b84fbf1fa0de" path="/var/lib/kubelet/pods/626e6c11-d496-4667-a00d-b84fbf1fa0de/volumes" Dec 13 04:03:26 crc kubenswrapper[5070]: I1213 04:03:26.263856 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7227b03a-84e0-4c9b-bb1a-baaaeb6828a9-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"7227b03a-84e0-4c9b-bb1a-baaaeb6828a9\") " pod="openstack/manila-scheduler-0" Dec 13 04:03:26 crc kubenswrapper[5070]: I1213 04:03:26.263973 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: 
\"kubernetes.io/host-path/7227b03a-84e0-4c9b-bb1a-baaaeb6828a9-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"7227b03a-84e0-4c9b-bb1a-baaaeb6828a9\") " pod="openstack/manila-scheduler-0" Dec 13 04:03:26 crc kubenswrapper[5070]: I1213 04:03:26.264071 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7227b03a-84e0-4c9b-bb1a-baaaeb6828a9-config-data\") pod \"manila-scheduler-0\" (UID: \"7227b03a-84e0-4c9b-bb1a-baaaeb6828a9\") " pod="openstack/manila-scheduler-0" Dec 13 04:03:26 crc kubenswrapper[5070]: I1213 04:03:26.264096 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7227b03a-84e0-4c9b-bb1a-baaaeb6828a9-combined-ca-bundle\") pod \"manila-scheduler-0\" (UID: \"7227b03a-84e0-4c9b-bb1a-baaaeb6828a9\") " pod="openstack/manila-scheduler-0" Dec 13 04:03:26 crc kubenswrapper[5070]: I1213 04:03:26.264122 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bx5nh\" (UniqueName: \"kubernetes.io/projected/7227b03a-84e0-4c9b-bb1a-baaaeb6828a9-kube-api-access-bx5nh\") pod \"manila-scheduler-0\" (UID: \"7227b03a-84e0-4c9b-bb1a-baaaeb6828a9\") " pod="openstack/manila-scheduler-0" Dec 13 04:03:26 crc kubenswrapper[5070]: I1213 04:03:26.264188 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7227b03a-84e0-4c9b-bb1a-baaaeb6828a9-scripts\") pod \"manila-scheduler-0\" (UID: \"7227b03a-84e0-4c9b-bb1a-baaaeb6828a9\") " pod="openstack/manila-scheduler-0" Dec 13 04:03:26 crc kubenswrapper[5070]: I1213 04:03:26.264194 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7227b03a-84e0-4c9b-bb1a-baaaeb6828a9-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"7227b03a-84e0-4c9b-bb1a-baaaeb6828a9\") " pod="openstack/manila-scheduler-0" Dec 13 04:03:26 crc kubenswrapper[5070]: I1213 04:03:26.271396 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7227b03a-84e0-4c9b-bb1a-baaaeb6828a9-combined-ca-bundle\") pod \"manila-scheduler-0\" (UID: \"7227b03a-84e0-4c9b-bb1a-baaaeb6828a9\") " pod="openstack/manila-scheduler-0" Dec 13 04:03:26 crc kubenswrapper[5070]: I1213 04:03:26.272163 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7227b03a-84e0-4c9b-bb1a-baaaeb6828a9-config-data\") pod \"manila-scheduler-0\" (UID: \"7227b03a-84e0-4c9b-bb1a-baaaeb6828a9\") " pod="openstack/manila-scheduler-0" Dec 13 04:03:26 crc kubenswrapper[5070]: I1213 04:03:26.280022 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7227b03a-84e0-4c9b-bb1a-baaaeb6828a9-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"7227b03a-84e0-4c9b-bb1a-baaaeb6828a9\") " pod="openstack/manila-scheduler-0" Dec 13 04:03:26 crc kubenswrapper[5070]: I1213 04:03:26.282154 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bx5nh\" (UniqueName: \"kubernetes.io/projected/7227b03a-84e0-4c9b-bb1a-baaaeb6828a9-kube-api-access-bx5nh\") pod \"manila-scheduler-0\" (UID: \"7227b03a-84e0-4c9b-bb1a-baaaeb6828a9\") " pod="openstack/manila-scheduler-0" Dec 13 04:03:26 crc kubenswrapper[5070]: I1213 04:03:26.284240 5070 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7227b03a-84e0-4c9b-bb1a-baaaeb6828a9-scripts\") pod \"manila-scheduler-0\" (UID: \"7227b03a-84e0-4c9b-bb1a-baaaeb6828a9\") " pod="openstack/manila-scheduler-0" Dec 13 04:03:26 crc kubenswrapper[5070]: I1213 04:03:26.295279 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-scheduler-0" Dec 13 04:03:26 crc kubenswrapper[5070]: I1213 04:03:26.798070 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-scheduler-0"] Dec 13 04:03:26 crc kubenswrapper[5070]: I1213 04:03:26.850691 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"7227b03a-84e0-4c9b-bb1a-baaaeb6828a9","Type":"ContainerStarted","Data":"34b327b22cd21d4ffa758a1900e93afd7d1794702cd1d854c91751b963f6eaa3"} Dec 13 04:03:27 crc kubenswrapper[5070]: I1213 04:03:27.867204 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"7227b03a-84e0-4c9b-bb1a-baaaeb6828a9","Type":"ContainerStarted","Data":"786a5e53b27ff276c41426bf2a4658fe5eacff9cf63725c893dcfc37ee20d7ea"} Dec 13 04:03:27 crc kubenswrapper[5070]: I1213 04:03:27.868067 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"7227b03a-84e0-4c9b-bb1a-baaaeb6828a9","Type":"ContainerStarted","Data":"eb00fb88a2ca53ada5c3922ad37ac2d9611da214be44cd817702a522c06f28fa"} Dec 13 04:03:27 crc kubenswrapper[5070]: I1213 04:03:27.890371 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-scheduler-0" podStartSLOduration=2.890348638 podStartE2EDuration="2.890348638s" podCreationTimestamp="2025-12-13 04:03:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 04:03:27.886348718 +0000 UTC m=+3100.122192294" watchObservedRunningTime="2025-12-13 04:03:27.890348638 +0000 UTC m=+3100.126192184" Dec 13 04:03:32 crc kubenswrapper[5070]: I1213 04:03:32.331380 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/manila-share-share1-0" Dec 13 04:03:32 crc kubenswrapper[5070]: I1213 04:03:32.398416 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-share-share1-0"] Dec 13 04:03:32 crc kubenswrapper[5070]: I1213 04:03:32.794110 5070 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-677fbcc968-jlxpr" podUID="0f20c4db-2943-474f-970d-02d52f185c6e" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.238:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.238:8443: connect: connection refused" Dec 13 04:03:32 crc kubenswrapper[5070]: I1213 04:03:32.794316 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-677fbcc968-jlxpr" Dec 13 04:03:32 crc kubenswrapper[5070]: I1213 04:03:32.943237 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/manila-share-share1-0" podUID="709384c1-56bc-451b-939e-910783412f47" containerName="manila-share" containerID="cri-o://f6ca60f0eb94658e725e11f48aa06546403474a7434df704dc0dc7be0a44dcb8" gracePeriod=30 Dec 13 04:03:32 crc kubenswrapper[5070]: I1213 04:03:32.943339 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/manila-share-share1-0" podUID="709384c1-56bc-451b-939e-910783412f47" 
containerName="probe" containerID="cri-o://903321bf578d07a2bd364ff87cad0439861337300c5e1b292b0f12feb32bae74" gracePeriod=30 Dec 13 04:03:33 crc kubenswrapper[5070]: I1213 04:03:33.313017 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/manila-api-0" Dec 13 04:03:33 crc kubenswrapper[5070]: I1213 04:03:33.954996 5070 generic.go:334] "Generic (PLEG): container finished" podID="709384c1-56bc-451b-939e-910783412f47" containerID="903321bf578d07a2bd364ff87cad0439861337300c5e1b292b0f12feb32bae74" exitCode=0 Dec 13 04:03:33 crc kubenswrapper[5070]: I1213 04:03:33.955365 5070 generic.go:334] "Generic (PLEG): container finished" podID="709384c1-56bc-451b-939e-910783412f47" containerID="f6ca60f0eb94658e725e11f48aa06546403474a7434df704dc0dc7be0a44dcb8" exitCode=1 Dec 13 04:03:33 crc kubenswrapper[5070]: I1213 04:03:33.955091 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"709384c1-56bc-451b-939e-910783412f47","Type":"ContainerDied","Data":"903321bf578d07a2bd364ff87cad0439861337300c5e1b292b0f12feb32bae74"} Dec 13 04:03:33 crc kubenswrapper[5070]: I1213 04:03:33.955414 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"709384c1-56bc-451b-939e-910783412f47","Type":"ContainerDied","Data":"f6ca60f0eb94658e725e11f48aa06546403474a7434df704dc0dc7be0a44dcb8"} Dec 13 04:03:33 crc kubenswrapper[5070]: I1213 04:03:33.955432 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"709384c1-56bc-451b-939e-910783412f47","Type":"ContainerDied","Data":"b905105736269a59af08801fc47209d83fc567d229631cec5f5b6ceb4c6574b8"} Dec 13 04:03:33 crc kubenswrapper[5070]: I1213 04:03:33.955465 5070 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b905105736269a59af08801fc47209d83fc567d229631cec5f5b6ceb4c6574b8" Dec 13 04:03:33 crc kubenswrapper[5070]: I1213 04:03:33.986508 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-share-share1-0" Dec 13 04:03:34 crc kubenswrapper[5070]: I1213 04:03:34.044395 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2m2bt\" (UniqueName: \"kubernetes.io/projected/709384c1-56bc-451b-939e-910783412f47-kube-api-access-2m2bt\") pod \"709384c1-56bc-451b-939e-910783412f47\" (UID: \"709384c1-56bc-451b-939e-910783412f47\") " Dec 13 04:03:34 crc kubenswrapper[5070]: I1213 04:03:34.044472 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/709384c1-56bc-451b-939e-910783412f47-config-data-custom\") pod \"709384c1-56bc-451b-939e-910783412f47\" (UID: \"709384c1-56bc-451b-939e-910783412f47\") " Dec 13 04:03:34 crc kubenswrapper[5070]: I1213 04:03:34.044541 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/709384c1-56bc-451b-939e-910783412f47-etc-machine-id\") pod \"709384c1-56bc-451b-939e-910783412f47\" (UID: \"709384c1-56bc-451b-939e-910783412f47\") " Dec 13 04:03:34 crc kubenswrapper[5070]: I1213 04:03:34.044573 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/709384c1-56bc-451b-939e-910783412f47-var-lib-manila\") pod \"709384c1-56bc-451b-939e-910783412f47\" (UID: \"709384c1-56bc-451b-939e-910783412f47\") " Dec 13 04:03:34 crc kubenswrapper[5070]: I1213 04:03:34.044608 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/709384c1-56bc-451b-939e-910783412f47-config-data\") pod \"709384c1-56bc-451b-939e-910783412f47\" (UID: \"709384c1-56bc-451b-939e-910783412f47\") " Dec 13 04:03:34 crc kubenswrapper[5070]: I1213 04:03:34.044637 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/709384c1-56bc-451b-939e-910783412f47-scripts\") pod \"709384c1-56bc-451b-939e-910783412f47\" (UID: \"709384c1-56bc-451b-939e-910783412f47\") " Dec 13 04:03:34 crc kubenswrapper[5070]: I1213 04:03:34.044704 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/709384c1-56bc-451b-939e-910783412f47-ceph\") pod \"709384c1-56bc-451b-939e-910783412f47\" (UID: \"709384c1-56bc-451b-939e-910783412f47\") " Dec 13 04:03:34 crc kubenswrapper[5070]: I1213 04:03:34.044742 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/709384c1-56bc-451b-939e-910783412f47-combined-ca-bundle\") pod \"709384c1-56bc-451b-939e-910783412f47\" (UID: \"709384c1-56bc-451b-939e-910783412f47\") " Dec 13 04:03:34 crc kubenswrapper[5070]: I1213 04:03:34.046354 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/709384c1-56bc-451b-939e-910783412f47-var-lib-manila" (OuterVolumeSpecName: "var-lib-manila") pod "709384c1-56bc-451b-939e-910783412f47" (UID: "709384c1-56bc-451b-939e-910783412f47"). InnerVolumeSpecName "var-lib-manila". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 13 04:03:34 crc kubenswrapper[5070]: I1213 04:03:34.046405 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/709384c1-56bc-451b-939e-910783412f47-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "709384c1-56bc-451b-939e-910783412f47" (UID: "709384c1-56bc-451b-939e-910783412f47"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 13 04:03:34 crc kubenswrapper[5070]: I1213 04:03:34.051989 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/709384c1-56bc-451b-939e-910783412f47-scripts" (OuterVolumeSpecName: "scripts") pod "709384c1-56bc-451b-939e-910783412f47" (UID: "709384c1-56bc-451b-939e-910783412f47"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 04:03:34 crc kubenswrapper[5070]: I1213 04:03:34.053271 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/709384c1-56bc-451b-939e-910783412f47-kube-api-access-2m2bt" (OuterVolumeSpecName: "kube-api-access-2m2bt") pod "709384c1-56bc-451b-939e-910783412f47" (UID: "709384c1-56bc-451b-939e-910783412f47"). InnerVolumeSpecName "kube-api-access-2m2bt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 04:03:34 crc kubenswrapper[5070]: I1213 04:03:34.058681 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/709384c1-56bc-451b-939e-910783412f47-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "709384c1-56bc-451b-939e-910783412f47" (UID: "709384c1-56bc-451b-939e-910783412f47"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 04:03:34 crc kubenswrapper[5070]: I1213 04:03:34.058768 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/709384c1-56bc-451b-939e-910783412f47-ceph" (OuterVolumeSpecName: "ceph") pod "709384c1-56bc-451b-939e-910783412f47" (UID: "709384c1-56bc-451b-939e-910783412f47"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 04:03:34 crc kubenswrapper[5070]: I1213 04:03:34.109806 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/709384c1-56bc-451b-939e-910783412f47-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "709384c1-56bc-451b-939e-910783412f47" (UID: "709384c1-56bc-451b-939e-910783412f47"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 04:03:34 crc kubenswrapper[5070]: I1213 04:03:34.147339 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/709384c1-56bc-451b-939e-910783412f47-config-data" (OuterVolumeSpecName: "config-data") pod "709384c1-56bc-451b-939e-910783412f47" (UID: "709384c1-56bc-451b-939e-910783412f47"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 04:03:34 crc kubenswrapper[5070]: I1213 04:03:34.148131 5070 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/709384c1-56bc-451b-939e-910783412f47-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 13 04:03:34 crc kubenswrapper[5070]: I1213 04:03:34.148157 5070 reconciler_common.go:293] "Volume detached for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/709384c1-56bc-451b-939e-910783412f47-var-lib-manila\") on node \"crc\" DevicePath \"\"" Dec 13 04:03:34 crc kubenswrapper[5070]: I1213 04:03:34.148166 5070 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/709384c1-56bc-451b-939e-910783412f47-config-data\") on node \"crc\" DevicePath \"\"" Dec 13 04:03:34 crc kubenswrapper[5070]: I1213 04:03:34.148179 5070 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/709384c1-56bc-451b-939e-910783412f47-scripts\") on node \"crc\" DevicePath \"\"" Dec 13 04:03:34 crc kubenswrapper[5070]: I1213 04:03:34.148188 5070 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/709384c1-56bc-451b-939e-910783412f47-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 04:03:34 crc kubenswrapper[5070]: I1213 04:03:34.148197 5070 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/709384c1-56bc-451b-939e-910783412f47-ceph\") on node \"crc\" DevicePath \"\"" Dec 13 04:03:34 crc kubenswrapper[5070]: I1213 04:03:34.148205 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2m2bt\" (UniqueName: \"kubernetes.io/projected/709384c1-56bc-451b-939e-910783412f47-kube-api-access-2m2bt\") on node \"crc\" DevicePath \"\"" Dec 13 04:03:34 crc kubenswrapper[5070]: I1213 04:03:34.148213 5070 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/709384c1-56bc-451b-939e-910783412f47-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 13 04:03:34 crc kubenswrapper[5070]: I1213 04:03:34.962940 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-share-share1-0" Dec 13 04:03:34 crc kubenswrapper[5070]: I1213 04:03:34.987970 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-share-share1-0"] Dec 13 04:03:34 crc kubenswrapper[5070]: I1213 04:03:34.996761 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/manila-share-share1-0"] Dec 13 04:03:35 crc kubenswrapper[5070]: I1213 04:03:35.015870 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-share-share1-0"] Dec 13 04:03:35 crc kubenswrapper[5070]: E1213 04:03:35.016255 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="709384c1-56bc-451b-939e-910783412f47" containerName="probe" Dec 13 04:03:35 crc kubenswrapper[5070]: I1213 04:03:35.016267 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="709384c1-56bc-451b-939e-910783412f47" containerName="probe" Dec 13 04:03:35 crc kubenswrapper[5070]: E1213 04:03:35.016278 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="709384c1-56bc-451b-939e-910783412f47" containerName="manila-share" Dec 13 04:03:35 crc kubenswrapper[5070]: I1213 04:03:35.016284 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="709384c1-56bc-451b-939e-910783412f47" containerName="manila-share" Dec 13 04:03:35 crc kubenswrapper[5070]: I1213 04:03:35.016489 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="709384c1-56bc-451b-939e-910783412f47" containerName="probe" Dec 13 04:03:35 crc kubenswrapper[5070]: I1213 04:03:35.016501 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="709384c1-56bc-451b-939e-910783412f47" containerName="manila-share" Dec 13 04:03:35 crc kubenswrapper[5070]: I1213 04:03:35.017481 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-share-share1-0" Dec 13 04:03:35 crc kubenswrapper[5070]: I1213 04:03:35.020038 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-share-share1-config-data" Dec 13 04:03:35 crc kubenswrapper[5070]: I1213 04:03:35.037057 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-share-share1-0"] Dec 13 04:03:35 crc kubenswrapper[5070]: I1213 04:03:35.167822 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f73fc610-e05f-414a-a55a-17cdfdf6c3d6-etc-machine-id\") pod \"manila-share-share1-0\" (UID: \"f73fc610-e05f-414a-a55a-17cdfdf6c3d6\") " pod="openstack/manila-share-share1-0" Dec 13 04:03:35 crc kubenswrapper[5070]: I1213 04:03:35.167880 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f73fc610-e05f-414a-a55a-17cdfdf6c3d6-config-data\") pod \"manila-share-share1-0\" (UID: \"f73fc610-e05f-414a-a55a-17cdfdf6c3d6\") " pod="openstack/manila-share-share1-0" Dec 13 04:03:35 crc kubenswrapper[5070]: I1213 04:03:35.167955 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f73fc610-e05f-414a-a55a-17cdfdf6c3d6-config-data-custom\") pod \"manila-share-share1-0\" (UID: \"f73fc610-e05f-414a-a55a-17cdfdf6c3d6\") " pod="openstack/manila-share-share1-0" Dec 13 04:03:35 crc kubenswrapper[5070]: I1213 04:03:35.168005 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f73fc610-e05f-414a-a55a-17cdfdf6c3d6-scripts\") pod \"manila-share-share1-0\" (UID: \"f73fc610-e05f-414a-a55a-17cdfdf6c3d6\") " pod="openstack/manila-share-share1-0" Dec 13 04:03:35 crc kubenswrapper[5070]: I1213 04:03:35.168029 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/f73fc610-e05f-414a-a55a-17cdfdf6c3d6-ceph\") pod \"manila-share-share1-0\" (UID: \"f73fc610-e05f-414a-a55a-17cdfdf6c3d6\") " pod="openstack/manila-share-share1-0" Dec 13 04:03:35 crc kubenswrapper[5070]: I1213 04:03:35.168130 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f73fc610-e05f-414a-a55a-17cdfdf6c3d6-combined-ca-bundle\") pod \"manila-share-share1-0\" (UID: \"f73fc610-e05f-414a-a55a-17cdfdf6c3d6\") " pod="openstack/manila-share-share1-0" Dec 13 04:03:35 crc kubenswrapper[5070]: I1213 04:03:35.168171 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/f73fc610-e05f-414a-a55a-17cdfdf6c3d6-var-lib-manila\") pod \"manila-share-share1-0\" (UID: \"f73fc610-e05f-414a-a55a-17cdfdf6c3d6\") " pod="openstack/manila-share-share1-0" Dec 13 04:03:35 crc kubenswrapper[5070]: I1213 04:03:35.168202 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r8rrq\" (UniqueName: \"kubernetes.io/projected/f73fc610-e05f-414a-a55a-17cdfdf6c3d6-kube-api-access-r8rrq\") pod \"manila-share-share1-0\" (UID: \"f73fc610-e05f-414a-a55a-17cdfdf6c3d6\") " pod="openstack/manila-share-share1-0" Dec 13 04:03:35 crc 
kubenswrapper[5070]: I1213 04:03:35.270164 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f73fc610-e05f-414a-a55a-17cdfdf6c3d6-config-data-custom\") pod \"manila-share-share1-0\" (UID: \"f73fc610-e05f-414a-a55a-17cdfdf6c3d6\") " pod="openstack/manila-share-share1-0" Dec 13 04:03:35 crc kubenswrapper[5070]: I1213 04:03:35.270223 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f73fc610-e05f-414a-a55a-17cdfdf6c3d6-scripts\") pod \"manila-share-share1-0\" (UID: \"f73fc610-e05f-414a-a55a-17cdfdf6c3d6\") " pod="openstack/manila-share-share1-0" Dec 13 04:03:35 crc kubenswrapper[5070]: I1213 04:03:35.270246 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/f73fc610-e05f-414a-a55a-17cdfdf6c3d6-ceph\") pod \"manila-share-share1-0\" (UID: \"f73fc610-e05f-414a-a55a-17cdfdf6c3d6\") " pod="openstack/manila-share-share1-0" Dec 13 04:03:35 crc kubenswrapper[5070]: I1213 04:03:35.270357 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f73fc610-e05f-414a-a55a-17cdfdf6c3d6-combined-ca-bundle\") pod \"manila-share-share1-0\" (UID: \"f73fc610-e05f-414a-a55a-17cdfdf6c3d6\") " pod="openstack/manila-share-share1-0" Dec 13 04:03:35 crc kubenswrapper[5070]: I1213 04:03:35.270389 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/f73fc610-e05f-414a-a55a-17cdfdf6c3d6-var-lib-manila\") pod \"manila-share-share1-0\" (UID: \"f73fc610-e05f-414a-a55a-17cdfdf6c3d6\") " pod="openstack/manila-share-share1-0" Dec 13 04:03:35 crc kubenswrapper[5070]: I1213 04:03:35.270411 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r8rrq\" (UniqueName: \"kubernetes.io/projected/f73fc610-e05f-414a-a55a-17cdfdf6c3d6-kube-api-access-r8rrq\") pod \"manila-share-share1-0\" (UID: \"f73fc610-e05f-414a-a55a-17cdfdf6c3d6\") " pod="openstack/manila-share-share1-0" Dec 13 04:03:35 crc kubenswrapper[5070]: I1213 04:03:35.270437 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f73fc610-e05f-414a-a55a-17cdfdf6c3d6-etc-machine-id\") pod \"manila-share-share1-0\" (UID: \"f73fc610-e05f-414a-a55a-17cdfdf6c3d6\") " pod="openstack/manila-share-share1-0" Dec 13 04:03:35 crc kubenswrapper[5070]: I1213 04:03:35.270469 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f73fc610-e05f-414a-a55a-17cdfdf6c3d6-config-data\") pod \"manila-share-share1-0\" (UID: \"f73fc610-e05f-414a-a55a-17cdfdf6c3d6\") " pod="openstack/manila-share-share1-0" Dec 13 04:03:35 crc kubenswrapper[5070]: I1213 04:03:35.271125 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/f73fc610-e05f-414a-a55a-17cdfdf6c3d6-var-lib-manila\") pod \"manila-share-share1-0\" (UID: \"f73fc610-e05f-414a-a55a-17cdfdf6c3d6\") " pod="openstack/manila-share-share1-0" Dec 13 04:03:35 crc kubenswrapper[5070]: I1213 04:03:35.271150 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: 
\"kubernetes.io/host-path/f73fc610-e05f-414a-a55a-17cdfdf6c3d6-etc-machine-id\") pod \"manila-share-share1-0\" (UID: \"f73fc610-e05f-414a-a55a-17cdfdf6c3d6\") " pod="openstack/manila-share-share1-0" Dec 13 04:03:35 crc kubenswrapper[5070]: I1213 04:03:35.275312 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f73fc610-e05f-414a-a55a-17cdfdf6c3d6-scripts\") pod \"manila-share-share1-0\" (UID: \"f73fc610-e05f-414a-a55a-17cdfdf6c3d6\") " pod="openstack/manila-share-share1-0" Dec 13 04:03:35 crc kubenswrapper[5070]: I1213 04:03:35.275370 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f73fc610-e05f-414a-a55a-17cdfdf6c3d6-combined-ca-bundle\") pod \"manila-share-share1-0\" (UID: \"f73fc610-e05f-414a-a55a-17cdfdf6c3d6\") " pod="openstack/manila-share-share1-0" Dec 13 04:03:35 crc kubenswrapper[5070]: I1213 04:03:35.276152 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f73fc610-e05f-414a-a55a-17cdfdf6c3d6-config-data-custom\") pod \"manila-share-share1-0\" (UID: \"f73fc610-e05f-414a-a55a-17cdfdf6c3d6\") " pod="openstack/manila-share-share1-0" Dec 13 04:03:35 crc kubenswrapper[5070]: I1213 04:03:35.276593 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/f73fc610-e05f-414a-a55a-17cdfdf6c3d6-ceph\") pod \"manila-share-share1-0\" (UID: \"f73fc610-e05f-414a-a55a-17cdfdf6c3d6\") " pod="openstack/manila-share-share1-0" Dec 13 04:03:35 crc kubenswrapper[5070]: I1213 04:03:35.276936 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f73fc610-e05f-414a-a55a-17cdfdf6c3d6-config-data\") pod \"manila-share-share1-0\" (UID: \"f73fc610-e05f-414a-a55a-17cdfdf6c3d6\") " pod="openstack/manila-share-share1-0" Dec 13 04:03:35 crc kubenswrapper[5070]: I1213 04:03:35.288281 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r8rrq\" (UniqueName: \"kubernetes.io/projected/f73fc610-e05f-414a-a55a-17cdfdf6c3d6-kube-api-access-r8rrq\") pod \"manila-share-share1-0\" (UID: \"f73fc610-e05f-414a-a55a-17cdfdf6c3d6\") " pod="openstack/manila-share-share1-0" Dec 13 04:03:35 crc kubenswrapper[5070]: I1213 04:03:35.339178 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-share-share1-0" Dec 13 04:03:35 crc kubenswrapper[5070]: I1213 04:03:35.837970 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-share-share1-0"] Dec 13 04:03:35 crc kubenswrapper[5070]: I1213 04:03:35.980251 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"f73fc610-e05f-414a-a55a-17cdfdf6c3d6","Type":"ContainerStarted","Data":"0cf8816967b1b2bf06e548d75622468fca8cc81eb802e2e24d75c81b9e19cd0a"} Dec 13 04:03:36 crc kubenswrapper[5070]: I1213 04:03:36.180427 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="709384c1-56bc-451b-939e-910783412f47" path="/var/lib/kubelet/pods/709384c1-56bc-451b-939e-910783412f47/volumes" Dec 13 04:03:36 crc kubenswrapper[5070]: I1213 04:03:36.295855 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/manila-scheduler-0" Dec 13 04:03:36 crc kubenswrapper[5070]: I1213 04:03:36.989722 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"f73fc610-e05f-414a-a55a-17cdfdf6c3d6","Type":"ContainerStarted","Data":"4cf12456e340e8c461dfb147a8d86905f6b89a12d96a062a201427ef827b9ba3"} Dec 13 04:03:36 crc kubenswrapper[5070]: I1213 04:03:36.990078 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"f73fc610-e05f-414a-a55a-17cdfdf6c3d6","Type":"ContainerStarted","Data":"c6e421690120a9081d3a86dd29661d651c6d07551335e8eb065677f0e8a69500"} Dec 13 04:03:37 crc kubenswrapper[5070]: I1213 04:03:37.015472 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-share-share1-0" podStartSLOduration=3.015455055 podStartE2EDuration="3.015455055s" podCreationTimestamp="2025-12-13 04:03:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 04:03:37.006585663 +0000 UTC m=+3109.242429229" watchObservedRunningTime="2025-12-13 04:03:37.015455055 +0000 UTC m=+3109.251298601" Dec 13 04:03:38 crc kubenswrapper[5070]: I1213 04:03:38.977201 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-677fbcc968-jlxpr" Dec 13 04:03:39 crc kubenswrapper[5070]: I1213 04:03:39.017955 5070 generic.go:334] "Generic (PLEG): container finished" podID="0f20c4db-2943-474f-970d-02d52f185c6e" containerID="d5fcca5b12c472f0d65862cba81af0ae790a9449ba5fe95a7e1a1ac1aa1750f2" exitCode=137 Dec 13 04:03:39 crc kubenswrapper[5070]: I1213 04:03:39.018025 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-677fbcc968-jlxpr" event={"ID":"0f20c4db-2943-474f-970d-02d52f185c6e","Type":"ContainerDied","Data":"d5fcca5b12c472f0d65862cba81af0ae790a9449ba5fe95a7e1a1ac1aa1750f2"} Dec 13 04:03:39 crc kubenswrapper[5070]: I1213 04:03:39.018060 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-677fbcc968-jlxpr" event={"ID":"0f20c4db-2943-474f-970d-02d52f185c6e","Type":"ContainerDied","Data":"f469d6fe4621c2131ae6b361161ce0367bc8a0c37603b9e96b4d02edc092eac2"} Dec 13 04:03:39 crc kubenswrapper[5070]: I1213 04:03:39.018081 5070 scope.go:117] "RemoveContainer" containerID="3b5170299cf86540bf9ffcd68a1aefacc52df624faa0a24827107039f9b865db" Dec 13 04:03:39 crc kubenswrapper[5070]: I1213 04:03:39.018306 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-677fbcc968-jlxpr" Dec 13 04:03:39 crc kubenswrapper[5070]: I1213 04:03:39.153206 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/0f20c4db-2943-474f-970d-02d52f185c6e-config-data\") pod \"0f20c4db-2943-474f-970d-02d52f185c6e\" (UID: \"0f20c4db-2943-474f-970d-02d52f185c6e\") " Dec 13 04:03:39 crc kubenswrapper[5070]: I1213 04:03:39.153393 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0f20c4db-2943-474f-970d-02d52f185c6e-logs\") pod \"0f20c4db-2943-474f-970d-02d52f185c6e\" (UID: \"0f20c4db-2943-474f-970d-02d52f185c6e\") " Dec 13 04:03:39 crc kubenswrapper[5070]: I1213 04:03:39.153469 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/0f20c4db-2943-474f-970d-02d52f185c6e-horizon-secret-key\") pod \"0f20c4db-2943-474f-970d-02d52f185c6e\" (UID: \"0f20c4db-2943-474f-970d-02d52f185c6e\") " Dec 13 04:03:39 crc kubenswrapper[5070]: I1213 04:03:39.153578 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0f20c4db-2943-474f-970d-02d52f185c6e-scripts\") pod \"0f20c4db-2943-474f-970d-02d52f185c6e\" (UID: \"0f20c4db-2943-474f-970d-02d52f185c6e\") " Dec 13 04:03:39 crc kubenswrapper[5070]: I1213 04:03:39.153644 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0f20c4db-2943-474f-970d-02d52f185c6e-combined-ca-bundle\") pod \"0f20c4db-2943-474f-970d-02d52f185c6e\" (UID: \"0f20c4db-2943-474f-970d-02d52f185c6e\") " Dec 13 04:03:39 crc kubenswrapper[5070]: I1213 04:03:39.153804 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l9c8n\" (UniqueName: \"kubernetes.io/projected/0f20c4db-2943-474f-970d-02d52f185c6e-kube-api-access-l9c8n\") pod \"0f20c4db-2943-474f-970d-02d52f185c6e\" (UID: \"0f20c4db-2943-474f-970d-02d52f185c6e\") " Dec 13 04:03:39 crc kubenswrapper[5070]: I1213 04:03:39.153854 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/0f20c4db-2943-474f-970d-02d52f185c6e-horizon-tls-certs\") pod \"0f20c4db-2943-474f-970d-02d52f185c6e\" (UID: \"0f20c4db-2943-474f-970d-02d52f185c6e\") " Dec 13 04:03:39 crc kubenswrapper[5070]: I1213 04:03:39.153992 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0f20c4db-2943-474f-970d-02d52f185c6e-logs" (OuterVolumeSpecName: "logs") pod "0f20c4db-2943-474f-970d-02d52f185c6e" (UID: "0f20c4db-2943-474f-970d-02d52f185c6e"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 04:03:39 crc kubenswrapper[5070]: I1213 04:03:39.154507 5070 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0f20c4db-2943-474f-970d-02d52f185c6e-logs\") on node \"crc\" DevicePath \"\"" Dec 13 04:03:39 crc kubenswrapper[5070]: I1213 04:03:39.169098 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0f20c4db-2943-474f-970d-02d52f185c6e-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "0f20c4db-2943-474f-970d-02d52f185c6e" (UID: "0f20c4db-2943-474f-970d-02d52f185c6e"). 
InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 04:03:39 crc kubenswrapper[5070]: I1213 04:03:39.177866 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0f20c4db-2943-474f-970d-02d52f185c6e-kube-api-access-l9c8n" (OuterVolumeSpecName: "kube-api-access-l9c8n") pod "0f20c4db-2943-474f-970d-02d52f185c6e" (UID: "0f20c4db-2943-474f-970d-02d52f185c6e"). InnerVolumeSpecName "kube-api-access-l9c8n". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 04:03:39 crc kubenswrapper[5070]: I1213 04:03:39.190713 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0f20c4db-2943-474f-970d-02d52f185c6e-config-data" (OuterVolumeSpecName: "config-data") pod "0f20c4db-2943-474f-970d-02d52f185c6e" (UID: "0f20c4db-2943-474f-970d-02d52f185c6e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 04:03:39 crc kubenswrapper[5070]: I1213 04:03:39.193781 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0f20c4db-2943-474f-970d-02d52f185c6e-scripts" (OuterVolumeSpecName: "scripts") pod "0f20c4db-2943-474f-970d-02d52f185c6e" (UID: "0f20c4db-2943-474f-970d-02d52f185c6e"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 04:03:39 crc kubenswrapper[5070]: I1213 04:03:39.197076 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0f20c4db-2943-474f-970d-02d52f185c6e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0f20c4db-2943-474f-970d-02d52f185c6e" (UID: "0f20c4db-2943-474f-970d-02d52f185c6e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 04:03:39 crc kubenswrapper[5070]: I1213 04:03:39.214835 5070 scope.go:117] "RemoveContainer" containerID="d5fcca5b12c472f0d65862cba81af0ae790a9449ba5fe95a7e1a1ac1aa1750f2" Dec 13 04:03:39 crc kubenswrapper[5070]: I1213 04:03:39.233771 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0f20c4db-2943-474f-970d-02d52f185c6e-horizon-tls-certs" (OuterVolumeSpecName: "horizon-tls-certs") pod "0f20c4db-2943-474f-970d-02d52f185c6e" (UID: "0f20c4db-2943-474f-970d-02d52f185c6e"). InnerVolumeSpecName "horizon-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 04:03:39 crc kubenswrapper[5070]: I1213 04:03:39.256271 5070 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0f20c4db-2943-474f-970d-02d52f185c6e-scripts\") on node \"crc\" DevicePath \"\"" Dec 13 04:03:39 crc kubenswrapper[5070]: I1213 04:03:39.256299 5070 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0f20c4db-2943-474f-970d-02d52f185c6e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 04:03:39 crc kubenswrapper[5070]: I1213 04:03:39.256311 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l9c8n\" (UniqueName: \"kubernetes.io/projected/0f20c4db-2943-474f-970d-02d52f185c6e-kube-api-access-l9c8n\") on node \"crc\" DevicePath \"\"" Dec 13 04:03:39 crc kubenswrapper[5070]: I1213 04:03:39.256319 5070 reconciler_common.go:293] "Volume detached for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/0f20c4db-2943-474f-970d-02d52f185c6e-horizon-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 13 04:03:39 crc kubenswrapper[5070]: I1213 04:03:39.256328 5070 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/0f20c4db-2943-474f-970d-02d52f185c6e-config-data\") on node \"crc\" DevicePath \"\"" Dec 13 04:03:39 crc kubenswrapper[5070]: I1213 04:03:39.256337 5070 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/0f20c4db-2943-474f-970d-02d52f185c6e-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Dec 13 04:03:39 crc kubenswrapper[5070]: I1213 04:03:39.272491 5070 scope.go:117] "RemoveContainer" containerID="3b5170299cf86540bf9ffcd68a1aefacc52df624faa0a24827107039f9b865db" Dec 13 04:03:39 crc kubenswrapper[5070]: E1213 04:03:39.273197 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3b5170299cf86540bf9ffcd68a1aefacc52df624faa0a24827107039f9b865db\": container with ID starting with 3b5170299cf86540bf9ffcd68a1aefacc52df624faa0a24827107039f9b865db not found: ID does not exist" containerID="3b5170299cf86540bf9ffcd68a1aefacc52df624faa0a24827107039f9b865db" Dec 13 04:03:39 crc kubenswrapper[5070]: I1213 04:03:39.273242 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3b5170299cf86540bf9ffcd68a1aefacc52df624faa0a24827107039f9b865db"} err="failed to get container status \"3b5170299cf86540bf9ffcd68a1aefacc52df624faa0a24827107039f9b865db\": rpc error: code = NotFound desc = could not find container \"3b5170299cf86540bf9ffcd68a1aefacc52df624faa0a24827107039f9b865db\": container with ID starting with 3b5170299cf86540bf9ffcd68a1aefacc52df624faa0a24827107039f9b865db not found: ID does not exist" Dec 13 04:03:39 crc kubenswrapper[5070]: I1213 04:03:39.273268 5070 scope.go:117] "RemoveContainer" containerID="d5fcca5b12c472f0d65862cba81af0ae790a9449ba5fe95a7e1a1ac1aa1750f2" Dec 13 04:03:39 crc kubenswrapper[5070]: E1213 04:03:39.275032 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d5fcca5b12c472f0d65862cba81af0ae790a9449ba5fe95a7e1a1ac1aa1750f2\": container with ID starting with d5fcca5b12c472f0d65862cba81af0ae790a9449ba5fe95a7e1a1ac1aa1750f2 not found: ID does not exist" containerID="d5fcca5b12c472f0d65862cba81af0ae790a9449ba5fe95a7e1a1ac1aa1750f2" Dec 
13 04:03:39 crc kubenswrapper[5070]: I1213 04:03:39.275068 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d5fcca5b12c472f0d65862cba81af0ae790a9449ba5fe95a7e1a1ac1aa1750f2"} err="failed to get container status \"d5fcca5b12c472f0d65862cba81af0ae790a9449ba5fe95a7e1a1ac1aa1750f2\": rpc error: code = NotFound desc = could not find container \"d5fcca5b12c472f0d65862cba81af0ae790a9449ba5fe95a7e1a1ac1aa1750f2\": container with ID starting with d5fcca5b12c472f0d65862cba81af0ae790a9449ba5fe95a7e1a1ac1aa1750f2 not found: ID does not exist" Dec 13 04:03:39 crc kubenswrapper[5070]: I1213 04:03:39.349548 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-677fbcc968-jlxpr"] Dec 13 04:03:39 crc kubenswrapper[5070]: I1213 04:03:39.357539 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-677fbcc968-jlxpr"] Dec 13 04:03:40 crc kubenswrapper[5070]: I1213 04:03:40.180950 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0f20c4db-2943-474f-970d-02d52f185c6e" path="/var/lib/kubelet/pods/0f20c4db-2943-474f-970d-02d52f185c6e/volumes" Dec 13 04:03:45 crc kubenswrapper[5070]: I1213 04:03:45.339762 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/manila-share-share1-0" Dec 13 04:03:47 crc kubenswrapper[5070]: I1213 04:03:47.918216 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/manila-scheduler-0" Dec 13 04:03:48 crc kubenswrapper[5070]: I1213 04:03:48.423312 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Dec 13 04:03:51 crc kubenswrapper[5070]: I1213 04:03:51.942898 5070 patch_prober.go:28] interesting pod/machine-config-daemon-9l4rb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 13 04:03:51 crc kubenswrapper[5070]: I1213 04:03:51.943198 5070 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 13 04:03:56 crc kubenswrapper[5070]: I1213 04:03:56.887814 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/manila-share-share1-0" Dec 13 04:04:21 crc kubenswrapper[5070]: I1213 04:04:21.942483 5070 patch_prober.go:28] interesting pod/machine-config-daemon-9l4rb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 13 04:04:21 crc kubenswrapper[5070]: I1213 04:04:21.943184 5070 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 13 04:04:21 crc kubenswrapper[5070]: I1213 04:04:21.943241 5070 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" Dec 
13 04:04:21 crc kubenswrapper[5070]: I1213 04:04:21.944291 5070 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"9efe948923eb00e6c9ce648b3696834a4b2264ea119eca5d1fc7e464ff7d2298"} pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 13 04:04:21 crc kubenswrapper[5070]: I1213 04:04:21.944358 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" containerName="machine-config-daemon" containerID="cri-o://9efe948923eb00e6c9ce648b3696834a4b2264ea119eca5d1fc7e464ff7d2298" gracePeriod=600 Dec 13 04:04:22 crc kubenswrapper[5070]: I1213 04:04:22.468512 5070 generic.go:334] "Generic (PLEG): container finished" podID="a2e447c7-5901-414f-af96-69441d4750db" containerID="9efe948923eb00e6c9ce648b3696834a4b2264ea119eca5d1fc7e464ff7d2298" exitCode=0 Dec 13 04:04:22 crc kubenswrapper[5070]: I1213 04:04:22.468581 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" event={"ID":"a2e447c7-5901-414f-af96-69441d4750db","Type":"ContainerDied","Data":"9efe948923eb00e6c9ce648b3696834a4b2264ea119eca5d1fc7e464ff7d2298"} Dec 13 04:04:22 crc kubenswrapper[5070]: I1213 04:04:22.468886 5070 scope.go:117] "RemoveContainer" containerID="9987099f8e98d31d38862264f7e07bca0970527ae353fb5b7e9a8135e3aed8d3" Dec 13 04:04:22 crc kubenswrapper[5070]: E1213 04:04:22.602734 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 04:04:23 crc kubenswrapper[5070]: I1213 04:04:23.483365 5070 scope.go:117] "RemoveContainer" containerID="9efe948923eb00e6c9ce648b3696834a4b2264ea119eca5d1fc7e464ff7d2298" Dec 13 04:04:23 crc kubenswrapper[5070]: E1213 04:04:23.483789 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 04:04:35 crc kubenswrapper[5070]: I1213 04:04:35.167399 5070 scope.go:117] "RemoveContainer" containerID="9efe948923eb00e6c9ce648b3696834a4b2264ea119eca5d1fc7e464ff7d2298" Dec 13 04:04:35 crc kubenswrapper[5070]: E1213 04:04:35.168322 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 04:04:44 crc kubenswrapper[5070]: I1213 04:04:44.179604 5070 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack/tempest-tests-tempest"] Dec 13 04:04:44 crc kubenswrapper[5070]: E1213 04:04:44.180542 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0f20c4db-2943-474f-970d-02d52f185c6e" containerName="horizon-log" Dec 13 04:04:44 crc kubenswrapper[5070]: I1213 04:04:44.180569 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="0f20c4db-2943-474f-970d-02d52f185c6e" containerName="horizon-log" Dec 13 04:04:44 crc kubenswrapper[5070]: E1213 04:04:44.180594 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0f20c4db-2943-474f-970d-02d52f185c6e" containerName="horizon" Dec 13 04:04:44 crc kubenswrapper[5070]: I1213 04:04:44.180603 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="0f20c4db-2943-474f-970d-02d52f185c6e" containerName="horizon" Dec 13 04:04:44 crc kubenswrapper[5070]: I1213 04:04:44.180927 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="0f20c4db-2943-474f-970d-02d52f185c6e" containerName="horizon-log" Dec 13 04:04:44 crc kubenswrapper[5070]: I1213 04:04:44.180947 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="0f20c4db-2943-474f-970d-02d52f185c6e" containerName="horizon" Dec 13 04:04:44 crc kubenswrapper[5070]: I1213 04:04:44.181924 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"] Dec 13 04:04:44 crc kubenswrapper[5070]: I1213 04:04:44.182020 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Dec 13 04:04:44 crc kubenswrapper[5070]: I1213 04:04:44.210991 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-env-vars-s0" Dec 13 04:04:44 crc kubenswrapper[5070]: I1213 04:04:44.211032 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"test-operator-controller-priv-key" Dec 13 04:04:44 crc kubenswrapper[5070]: I1213 04:04:44.211035 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-custom-data-s0" Dec 13 04:04:44 crc kubenswrapper[5070]: I1213 04:04:44.211760 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-l2j7l" Dec 13 04:04:44 crc kubenswrapper[5070]: I1213 04:04:44.359711 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7414f018-4f89-4d33-a19a-af5e996ba16b-config-data\") pod \"tempest-tests-tempest\" (UID: \"7414f018-4f89-4d33-a19a-af5e996ba16b\") " pod="openstack/tempest-tests-tempest" Dec 13 04:04:44 crc kubenswrapper[5070]: I1213 04:04:44.359981 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/7414f018-4f89-4d33-a19a-af5e996ba16b-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"7414f018-4f89-4d33-a19a-af5e996ba16b\") " pod="openstack/tempest-tests-tempest" Dec 13 04:04:44 crc kubenswrapper[5070]: I1213 04:04:44.360003 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/7414f018-4f89-4d33-a19a-af5e996ba16b-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"7414f018-4f89-4d33-a19a-af5e996ba16b\") " pod="openstack/tempest-tests-tempest" Dec 13 04:04:44 crc kubenswrapper[5070]: I1213 04:04:44.360027 5070 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/7414f018-4f89-4d33-a19a-af5e996ba16b-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"7414f018-4f89-4d33-a19a-af5e996ba16b\") " pod="openstack/tempest-tests-tempest" Dec 13 04:04:44 crc kubenswrapper[5070]: I1213 04:04:44.360122 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/7414f018-4f89-4d33-a19a-af5e996ba16b-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"7414f018-4f89-4d33-a19a-af5e996ba16b\") " pod="openstack/tempest-tests-tempest" Dec 13 04:04:44 crc kubenswrapper[5070]: I1213 04:04:44.360168 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7414f018-4f89-4d33-a19a-af5e996ba16b-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"7414f018-4f89-4d33-a19a-af5e996ba16b\") " pod="openstack/tempest-tests-tempest" Dec 13 04:04:44 crc kubenswrapper[5070]: I1213 04:04:44.360189 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/7414f018-4f89-4d33-a19a-af5e996ba16b-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"7414f018-4f89-4d33-a19a-af5e996ba16b\") " pod="openstack/tempest-tests-tempest" Dec 13 04:04:44 crc kubenswrapper[5070]: I1213 04:04:44.360261 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"tempest-tests-tempest\" (UID: \"7414f018-4f89-4d33-a19a-af5e996ba16b\") " pod="openstack/tempest-tests-tempest" Dec 13 04:04:44 crc kubenswrapper[5070]: I1213 04:04:44.360294 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rhq74\" (UniqueName: \"kubernetes.io/projected/7414f018-4f89-4d33-a19a-af5e996ba16b-kube-api-access-rhq74\") pod \"tempest-tests-tempest\" (UID: \"7414f018-4f89-4d33-a19a-af5e996ba16b\") " pod="openstack/tempest-tests-tempest" Dec 13 04:04:44 crc kubenswrapper[5070]: I1213 04:04:44.461735 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/7414f018-4f89-4d33-a19a-af5e996ba16b-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"7414f018-4f89-4d33-a19a-af5e996ba16b\") " pod="openstack/tempest-tests-tempest" Dec 13 04:04:44 crc kubenswrapper[5070]: I1213 04:04:44.461793 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7414f018-4f89-4d33-a19a-af5e996ba16b-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"7414f018-4f89-4d33-a19a-af5e996ba16b\") " pod="openstack/tempest-tests-tempest" Dec 13 04:04:44 crc kubenswrapper[5070]: I1213 04:04:44.461814 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/7414f018-4f89-4d33-a19a-af5e996ba16b-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"7414f018-4f89-4d33-a19a-af5e996ba16b\") " pod="openstack/tempest-tests-tempest" Dec 13 04:04:44 crc kubenswrapper[5070]: I1213 04:04:44.461868 5070 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"tempest-tests-tempest\" (UID: \"7414f018-4f89-4d33-a19a-af5e996ba16b\") " pod="openstack/tempest-tests-tempest" Dec 13 04:04:44 crc kubenswrapper[5070]: I1213 04:04:44.461904 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rhq74\" (UniqueName: \"kubernetes.io/projected/7414f018-4f89-4d33-a19a-af5e996ba16b-kube-api-access-rhq74\") pod \"tempest-tests-tempest\" (UID: \"7414f018-4f89-4d33-a19a-af5e996ba16b\") " pod="openstack/tempest-tests-tempest" Dec 13 04:04:44 crc kubenswrapper[5070]: I1213 04:04:44.461930 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7414f018-4f89-4d33-a19a-af5e996ba16b-config-data\") pod \"tempest-tests-tempest\" (UID: \"7414f018-4f89-4d33-a19a-af5e996ba16b\") " pod="openstack/tempest-tests-tempest" Dec 13 04:04:44 crc kubenswrapper[5070]: I1213 04:04:44.461956 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/7414f018-4f89-4d33-a19a-af5e996ba16b-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"7414f018-4f89-4d33-a19a-af5e996ba16b\") " pod="openstack/tempest-tests-tempest" Dec 13 04:04:44 crc kubenswrapper[5070]: I1213 04:04:44.461975 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/7414f018-4f89-4d33-a19a-af5e996ba16b-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"7414f018-4f89-4d33-a19a-af5e996ba16b\") " pod="openstack/tempest-tests-tempest" Dec 13 04:04:44 crc kubenswrapper[5070]: I1213 04:04:44.461995 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/7414f018-4f89-4d33-a19a-af5e996ba16b-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"7414f018-4f89-4d33-a19a-af5e996ba16b\") " pod="openstack/tempest-tests-tempest" Dec 13 04:04:44 crc kubenswrapper[5070]: I1213 04:04:44.462520 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/7414f018-4f89-4d33-a19a-af5e996ba16b-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"7414f018-4f89-4d33-a19a-af5e996ba16b\") " pod="openstack/tempest-tests-tempest" Dec 13 04:04:44 crc kubenswrapper[5070]: I1213 04:04:44.462540 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/7414f018-4f89-4d33-a19a-af5e996ba16b-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"7414f018-4f89-4d33-a19a-af5e996ba16b\") " pod="openstack/tempest-tests-tempest" Dec 13 04:04:44 crc kubenswrapper[5070]: I1213 04:04:44.463267 5070 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"tempest-tests-tempest\" (UID: \"7414f018-4f89-4d33-a19a-af5e996ba16b\") device mount path \"/mnt/openstack/pv03\"" pod="openstack/tempest-tests-tempest" Dec 13 04:04:44 crc kubenswrapper[5070]: I1213 04:04:44.463905 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" 
(UniqueName: \"kubernetes.io/configmap/7414f018-4f89-4d33-a19a-af5e996ba16b-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"7414f018-4f89-4d33-a19a-af5e996ba16b\") " pod="openstack/tempest-tests-tempest" Dec 13 04:04:44 crc kubenswrapper[5070]: I1213 04:04:44.464750 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7414f018-4f89-4d33-a19a-af5e996ba16b-config-data\") pod \"tempest-tests-tempest\" (UID: \"7414f018-4f89-4d33-a19a-af5e996ba16b\") " pod="openstack/tempest-tests-tempest" Dec 13 04:04:44 crc kubenswrapper[5070]: I1213 04:04:44.468731 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/7414f018-4f89-4d33-a19a-af5e996ba16b-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"7414f018-4f89-4d33-a19a-af5e996ba16b\") " pod="openstack/tempest-tests-tempest" Dec 13 04:04:44 crc kubenswrapper[5070]: I1213 04:04:44.469015 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7414f018-4f89-4d33-a19a-af5e996ba16b-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"7414f018-4f89-4d33-a19a-af5e996ba16b\") " pod="openstack/tempest-tests-tempest" Dec 13 04:04:44 crc kubenswrapper[5070]: I1213 04:04:44.471151 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/7414f018-4f89-4d33-a19a-af5e996ba16b-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"7414f018-4f89-4d33-a19a-af5e996ba16b\") " pod="openstack/tempest-tests-tempest" Dec 13 04:04:44 crc kubenswrapper[5070]: I1213 04:04:44.488891 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rhq74\" (UniqueName: \"kubernetes.io/projected/7414f018-4f89-4d33-a19a-af5e996ba16b-kube-api-access-rhq74\") pod \"tempest-tests-tempest\" (UID: \"7414f018-4f89-4d33-a19a-af5e996ba16b\") " pod="openstack/tempest-tests-tempest" Dec 13 04:04:44 crc kubenswrapper[5070]: I1213 04:04:44.518646 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"tempest-tests-tempest\" (UID: \"7414f018-4f89-4d33-a19a-af5e996ba16b\") " pod="openstack/tempest-tests-tempest" Dec 13 04:04:44 crc kubenswrapper[5070]: I1213 04:04:44.535900 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/tempest-tests-tempest" Dec 13 04:04:45 crc kubenswrapper[5070]: I1213 04:04:45.014346 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"] Dec 13 04:04:45 crc kubenswrapper[5070]: I1213 04:04:45.692666 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"7414f018-4f89-4d33-a19a-af5e996ba16b","Type":"ContainerStarted","Data":"3a79310110f02498db9e3a8c0b25832268fc84944139b04d9c8e13d3a09f089e"} Dec 13 04:04:46 crc kubenswrapper[5070]: I1213 04:04:46.167306 5070 scope.go:117] "RemoveContainer" containerID="9efe948923eb00e6c9ce648b3696834a4b2264ea119eca5d1fc7e464ff7d2298" Dec 13 04:04:46 crc kubenswrapper[5070]: E1213 04:04:46.167609 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 04:04:48 crc kubenswrapper[5070]: I1213 04:04:48.243003 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-p658l"] Dec 13 04:04:48 crc kubenswrapper[5070]: I1213 04:04:48.247895 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-p658l" Dec 13 04:04:48 crc kubenswrapper[5070]: I1213 04:04:48.250546 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/791a319b-d49f-46c0-ad64-4fbc3ff12524-catalog-content\") pod \"redhat-marketplace-p658l\" (UID: \"791a319b-d49f-46c0-ad64-4fbc3ff12524\") " pod="openshift-marketplace/redhat-marketplace-p658l" Dec 13 04:04:48 crc kubenswrapper[5070]: I1213 04:04:48.251217 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/791a319b-d49f-46c0-ad64-4fbc3ff12524-utilities\") pod \"redhat-marketplace-p658l\" (UID: \"791a319b-d49f-46c0-ad64-4fbc3ff12524\") " pod="openshift-marketplace/redhat-marketplace-p658l" Dec 13 04:04:48 crc kubenswrapper[5070]: I1213 04:04:48.252708 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pvlzs\" (UniqueName: \"kubernetes.io/projected/791a319b-d49f-46c0-ad64-4fbc3ff12524-kube-api-access-pvlzs\") pod \"redhat-marketplace-p658l\" (UID: \"791a319b-d49f-46c0-ad64-4fbc3ff12524\") " pod="openshift-marketplace/redhat-marketplace-p658l" Dec 13 04:04:48 crc kubenswrapper[5070]: I1213 04:04:48.254532 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-p658l"] Dec 13 04:04:48 crc kubenswrapper[5070]: I1213 04:04:48.363565 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/791a319b-d49f-46c0-ad64-4fbc3ff12524-utilities\") pod \"redhat-marketplace-p658l\" (UID: \"791a319b-d49f-46c0-ad64-4fbc3ff12524\") " pod="openshift-marketplace/redhat-marketplace-p658l" Dec 13 04:04:48 crc kubenswrapper[5070]: I1213 04:04:48.363636 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pvlzs\" (UniqueName: 
\"kubernetes.io/projected/791a319b-d49f-46c0-ad64-4fbc3ff12524-kube-api-access-pvlzs\") pod \"redhat-marketplace-p658l\" (UID: \"791a319b-d49f-46c0-ad64-4fbc3ff12524\") " pod="openshift-marketplace/redhat-marketplace-p658l" Dec 13 04:04:48 crc kubenswrapper[5070]: I1213 04:04:48.363712 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/791a319b-d49f-46c0-ad64-4fbc3ff12524-catalog-content\") pod \"redhat-marketplace-p658l\" (UID: \"791a319b-d49f-46c0-ad64-4fbc3ff12524\") " pod="openshift-marketplace/redhat-marketplace-p658l" Dec 13 04:04:48 crc kubenswrapper[5070]: I1213 04:04:48.364319 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/791a319b-d49f-46c0-ad64-4fbc3ff12524-catalog-content\") pod \"redhat-marketplace-p658l\" (UID: \"791a319b-d49f-46c0-ad64-4fbc3ff12524\") " pod="openshift-marketplace/redhat-marketplace-p658l" Dec 13 04:04:48 crc kubenswrapper[5070]: I1213 04:04:48.365976 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/791a319b-d49f-46c0-ad64-4fbc3ff12524-utilities\") pod \"redhat-marketplace-p658l\" (UID: \"791a319b-d49f-46c0-ad64-4fbc3ff12524\") " pod="openshift-marketplace/redhat-marketplace-p658l" Dec 13 04:04:48 crc kubenswrapper[5070]: I1213 04:04:48.385048 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pvlzs\" (UniqueName: \"kubernetes.io/projected/791a319b-d49f-46c0-ad64-4fbc3ff12524-kube-api-access-pvlzs\") pod \"redhat-marketplace-p658l\" (UID: \"791a319b-d49f-46c0-ad64-4fbc3ff12524\") " pod="openshift-marketplace/redhat-marketplace-p658l" Dec 13 04:04:48 crc kubenswrapper[5070]: I1213 04:04:48.580426 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-p658l" Dec 13 04:04:53 crc kubenswrapper[5070]: I1213 04:04:53.501116 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-p658l"] Dec 13 04:04:53 crc kubenswrapper[5070]: W1213 04:04:53.506082 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod791a319b_d49f_46c0_ad64_4fbc3ff12524.slice/crio-dc33813435ad8cdc86468eadfab13a5e2e0cbf2d809bfa0c662f8c5d7bc9a550 WatchSource:0}: Error finding container dc33813435ad8cdc86468eadfab13a5e2e0cbf2d809bfa0c662f8c5d7bc9a550: Status 404 returned error can't find the container with id dc33813435ad8cdc86468eadfab13a5e2e0cbf2d809bfa0c662f8c5d7bc9a550 Dec 13 04:04:53 crc kubenswrapper[5070]: I1213 04:04:53.773387 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-p658l" event={"ID":"791a319b-d49f-46c0-ad64-4fbc3ff12524","Type":"ContainerStarted","Data":"dc33813435ad8cdc86468eadfab13a5e2e0cbf2d809bfa0c662f8c5d7bc9a550"} Dec 13 04:04:55 crc kubenswrapper[5070]: I1213 04:04:55.793640 5070 generic.go:334] "Generic (PLEG): container finished" podID="791a319b-d49f-46c0-ad64-4fbc3ff12524" containerID="6ae442f026a548fc109d45c21381895a2d8078471445e8c48d21c6ef9fcd88ab" exitCode=0 Dec 13 04:04:55 crc kubenswrapper[5070]: I1213 04:04:55.793746 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-p658l" event={"ID":"791a319b-d49f-46c0-ad64-4fbc3ff12524","Type":"ContainerDied","Data":"6ae442f026a548fc109d45c21381895a2d8078471445e8c48d21c6ef9fcd88ab"} Dec 13 04:05:00 crc kubenswrapper[5070]: I1213 04:05:00.167069 5070 scope.go:117] "RemoveContainer" containerID="9efe948923eb00e6c9ce648b3696834a4b2264ea119eca5d1fc7e464ff7d2298" Dec 13 04:05:00 crc kubenswrapper[5070]: E1213 04:05:00.167560 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 04:05:02 crc kubenswrapper[5070]: I1213 04:05:02.580953 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-w26qp"] Dec 13 04:05:02 crc kubenswrapper[5070]: I1213 04:05:02.610086 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-w26qp" Dec 13 04:05:02 crc kubenswrapper[5070]: I1213 04:05:02.621186 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-w26qp"] Dec 13 04:05:02 crc kubenswrapper[5070]: I1213 04:05:02.778425 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d76cdfb-92e1-4f64-af32-0e44eafb9588-catalog-content\") pod \"community-operators-w26qp\" (UID: \"1d76cdfb-92e1-4f64-af32-0e44eafb9588\") " pod="openshift-marketplace/community-operators-w26qp" Dec 13 04:05:02 crc kubenswrapper[5070]: I1213 04:05:02.778549 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d76cdfb-92e1-4f64-af32-0e44eafb9588-utilities\") pod \"community-operators-w26qp\" (UID: \"1d76cdfb-92e1-4f64-af32-0e44eafb9588\") " pod="openshift-marketplace/community-operators-w26qp" Dec 13 04:05:02 crc kubenswrapper[5070]: I1213 04:05:02.778703 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k6dxl\" (UniqueName: \"kubernetes.io/projected/1d76cdfb-92e1-4f64-af32-0e44eafb9588-kube-api-access-k6dxl\") pod \"community-operators-w26qp\" (UID: \"1d76cdfb-92e1-4f64-af32-0e44eafb9588\") " pod="openshift-marketplace/community-operators-w26qp" Dec 13 04:05:02 crc kubenswrapper[5070]: I1213 04:05:02.880637 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k6dxl\" (UniqueName: \"kubernetes.io/projected/1d76cdfb-92e1-4f64-af32-0e44eafb9588-kube-api-access-k6dxl\") pod \"community-operators-w26qp\" (UID: \"1d76cdfb-92e1-4f64-af32-0e44eafb9588\") " pod="openshift-marketplace/community-operators-w26qp" Dec 13 04:05:02 crc kubenswrapper[5070]: I1213 04:05:02.880757 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d76cdfb-92e1-4f64-af32-0e44eafb9588-catalog-content\") pod \"community-operators-w26qp\" (UID: \"1d76cdfb-92e1-4f64-af32-0e44eafb9588\") " pod="openshift-marketplace/community-operators-w26qp" Dec 13 04:05:02 crc kubenswrapper[5070]: I1213 04:05:02.880779 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d76cdfb-92e1-4f64-af32-0e44eafb9588-utilities\") pod \"community-operators-w26qp\" (UID: \"1d76cdfb-92e1-4f64-af32-0e44eafb9588\") " pod="openshift-marketplace/community-operators-w26qp" Dec 13 04:05:02 crc kubenswrapper[5070]: I1213 04:05:02.881238 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d76cdfb-92e1-4f64-af32-0e44eafb9588-utilities\") pod \"community-operators-w26qp\" (UID: \"1d76cdfb-92e1-4f64-af32-0e44eafb9588\") " pod="openshift-marketplace/community-operators-w26qp" Dec 13 04:05:02 crc kubenswrapper[5070]: I1213 04:05:02.881802 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d76cdfb-92e1-4f64-af32-0e44eafb9588-catalog-content\") pod \"community-operators-w26qp\" (UID: \"1d76cdfb-92e1-4f64-af32-0e44eafb9588\") " pod="openshift-marketplace/community-operators-w26qp" Dec 13 04:05:02 crc kubenswrapper[5070]: I1213 04:05:02.909047 5070 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-k6dxl\" (UniqueName: \"kubernetes.io/projected/1d76cdfb-92e1-4f64-af32-0e44eafb9588-kube-api-access-k6dxl\") pod \"community-operators-w26qp\" (UID: \"1d76cdfb-92e1-4f64-af32-0e44eafb9588\") " pod="openshift-marketplace/community-operators-w26qp" Dec 13 04:05:02 crc kubenswrapper[5070]: I1213 04:05:02.973028 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-w26qp" Dec 13 04:05:13 crc kubenswrapper[5070]: E1213 04:05:13.570149 5070 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified" Dec 13 04:05:13 crc kubenswrapper[5070]: E1213 04:05:13.571815 5070 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:tempest-tests-tempest-tests-runner,Image:quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:test-operator-ephemeral-workdir,ReadOnly:false,MountPath:/var/lib/tempest,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-ephemeral-temporary,ReadOnly:false,MountPath:/tmp,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:false,MountPath:/etc/test_operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-logs,ReadOnly:false,MountPath:/var/lib/tempest/external_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config,ReadOnly:true,MountPath:/etc/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config,ReadOnly:true,MountPath:/var/lib/tempest/.config/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config-secret,ReadOnly:false,MountPath:/etc/openstack/secure.yaml,SubPath:secure.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ca-certs,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ssh-key,ReadOnly:false,MountPath:/var/lib/tempest/id_ecdsa,SubPath:ssh_key,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-rhq74,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42480,RunAsNonRoot:*false,ReadOnlyRootFilesystem:*false,AllowPrivilegeEscalation:*true,RunAsGroup:*42480,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{EnvFromSource{Prefix:,ConfigMapRef:&ConfigMapEnvSource{LocalObjectReference:LocalObjectReference{Name:tempest-tests-tempest-custom-data-s0,},Optiona
l:nil,},SecretRef:nil,},EnvFromSource{Prefix:,ConfigMapRef:&ConfigMapEnvSource{LocalObjectReference:LocalObjectReference{Name:tempest-tests-tempest-env-vars-s0,},Optional:nil,},SecretRef:nil,},},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod tempest-tests-tempest_openstack(7414f018-4f89-4d33-a19a-af5e996ba16b): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 13 04:05:13 crc kubenswrapper[5070]: E1213 04:05:13.573048 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"tempest-tests-tempest-tests-runner\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/tempest-tests-tempest" podUID="7414f018-4f89-4d33-a19a-af5e996ba16b" Dec 13 04:05:13 crc kubenswrapper[5070]: I1213 04:05:13.943815 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-w26qp"] Dec 13 04:05:13 crc kubenswrapper[5070]: I1213 04:05:13.990265 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-w26qp" event={"ID":"1d76cdfb-92e1-4f64-af32-0e44eafb9588","Type":"ContainerStarted","Data":"d83f0deddd5db76aaf8041b3acd2096483b883cd76923a313e3813f176ecb80f"} Dec 13 04:05:13 crc kubenswrapper[5070]: E1213 04:05:13.992030 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"tempest-tests-tempest-tests-runner\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified\\\"\"" pod="openstack/tempest-tests-tempest" podUID="7414f018-4f89-4d33-a19a-af5e996ba16b" Dec 13 04:05:15 crc kubenswrapper[5070]: I1213 04:05:15.003251 5070 generic.go:334] "Generic (PLEG): container finished" podID="791a319b-d49f-46c0-ad64-4fbc3ff12524" containerID="ef8d533d91378fbc000c2b10b5ba4d12a5f4cfc513e3f9281db419dc8a7f286b" exitCode=0 Dec 13 04:05:15 crc kubenswrapper[5070]: I1213 04:05:15.003361 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-p658l" event={"ID":"791a319b-d49f-46c0-ad64-4fbc3ff12524","Type":"ContainerDied","Data":"ef8d533d91378fbc000c2b10b5ba4d12a5f4cfc513e3f9281db419dc8a7f286b"} Dec 13 04:05:15 crc kubenswrapper[5070]: I1213 04:05:15.009460 5070 generic.go:334] "Generic (PLEG): container finished" podID="1d76cdfb-92e1-4f64-af32-0e44eafb9588" containerID="153fde5078a45391a0aa69749df9b54ef43b1b6e7a5a60c1db1cd2fc6a841af3" exitCode=0 Dec 13 04:05:15 crc kubenswrapper[5070]: I1213 04:05:15.009526 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-w26qp" event={"ID":"1d76cdfb-92e1-4f64-af32-0e44eafb9588","Type":"ContainerDied","Data":"153fde5078a45391a0aa69749df9b54ef43b1b6e7a5a60c1db1cd2fc6a841af3"} Dec 13 04:05:15 crc kubenswrapper[5070]: I1213 04:05:15.167370 5070 scope.go:117] "RemoveContainer" containerID="9efe948923eb00e6c9ce648b3696834a4b2264ea119eca5d1fc7e464ff7d2298" Dec 13 04:05:15 crc kubenswrapper[5070]: E1213 04:05:15.167750 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" 
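
The repeated "back-off 5m0s restarting failed container" messages for machine-config-daemon show the kubelet's restart back-off sitting at its ceiling: by upstream kubelet defaults the per-container delay starts at 10s and doubles after each failed restart until it is capped at 5 minutes. The short sketch below restates that arithmetic for illustration only; the constants mirror the upstream defaults rather than being read from this cluster's configuration.

    package main

    import (
        "fmt"
        "time"
    )

    // Illustrative restart back-off: 10s initial delay, doubled per failure,
    // capped at 5m (the "back-off 5m0s" value reported in the log).
    const (
        initialBackoff = 10 * time.Second
        maxBackoff     = 5 * time.Minute
    )

    func backoffAfter(failures int) time.Duration {
        d := initialBackoff
        for i := 1; i < failures; i++ {
            d *= 2
            if d >= maxBackoff {
                return maxBackoff
            }
        }
        return d
    }

    func main() {
        for f := 1; f <= 7; f++ {
            fmt.Printf("restart %d -> wait %s\n", f, backoffAfter(f))
        }
        // With these constants, restart 6 and later wait 5m0s, matching the
        // CrashLoopBackOff message above.
    }
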
pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 04:05:16 crc kubenswrapper[5070]: I1213 04:05:16.019454 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-p658l" event={"ID":"791a319b-d49f-46c0-ad64-4fbc3ff12524","Type":"ContainerStarted","Data":"2fac68c3583ff75b9ddb21d2f51be11ae93a3140dfa2ae4172ddd1791e66e9fc"} Dec 13 04:05:16 crc kubenswrapper[5070]: I1213 04:05:16.021942 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-w26qp" event={"ID":"1d76cdfb-92e1-4f64-af32-0e44eafb9588","Type":"ContainerStarted","Data":"b6f4cc41b2327761f5065d7bd104cb2bc81343e4e4f01f77fcfa55ee36a9d8d4"} Dec 13 04:05:16 crc kubenswrapper[5070]: I1213 04:05:16.060240 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-p658l" podStartSLOduration=26.032653205 podStartE2EDuration="28.060218983s" podCreationTimestamp="2025-12-13 04:04:48 +0000 UTC" firstStartedPulling="2025-12-13 04:05:13.450854636 +0000 UTC m=+3205.686698192" lastFinishedPulling="2025-12-13 04:05:15.478420424 +0000 UTC m=+3207.714263970" observedRunningTime="2025-12-13 04:05:16.039498767 +0000 UTC m=+3208.275342313" watchObservedRunningTime="2025-12-13 04:05:16.060218983 +0000 UTC m=+3208.296062529" Dec 13 04:05:17 crc kubenswrapper[5070]: I1213 04:05:17.032833 5070 generic.go:334] "Generic (PLEG): container finished" podID="1d76cdfb-92e1-4f64-af32-0e44eafb9588" containerID="b6f4cc41b2327761f5065d7bd104cb2bc81343e4e4f01f77fcfa55ee36a9d8d4" exitCode=0 Dec 13 04:05:17 crc kubenswrapper[5070]: I1213 04:05:17.032887 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-w26qp" event={"ID":"1d76cdfb-92e1-4f64-af32-0e44eafb9588","Type":"ContainerDied","Data":"b6f4cc41b2327761f5065d7bd104cb2bc81343e4e4f01f77fcfa55ee36a9d8d4"} Dec 13 04:05:18 crc kubenswrapper[5070]: I1213 04:05:18.046469 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-w26qp" event={"ID":"1d76cdfb-92e1-4f64-af32-0e44eafb9588","Type":"ContainerStarted","Data":"eaaacd5927c229b981f576587f7ca57c18b3878b2f3172964e09ce0ed95b8519"} Dec 13 04:05:18 crc kubenswrapper[5070]: I1213 04:05:18.078869 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-w26qp" podStartSLOduration=13.537360453 podStartE2EDuration="16.078845046s" podCreationTimestamp="2025-12-13 04:05:02 +0000 UTC" firstStartedPulling="2025-12-13 04:05:15.012317753 +0000 UTC m=+3207.248161309" lastFinishedPulling="2025-12-13 04:05:17.553802356 +0000 UTC m=+3209.789645902" observedRunningTime="2025-12-13 04:05:18.066483719 +0000 UTC m=+3210.302327275" watchObservedRunningTime="2025-12-13 04:05:18.078845046 +0000 UTC m=+3210.314688612" Dec 13 04:05:18 crc kubenswrapper[5070]: I1213 04:05:18.581287 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-p658l" Dec 13 04:05:18 crc kubenswrapper[5070]: I1213 04:05:18.581420 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-p658l" Dec 13 04:05:18 crc kubenswrapper[5070]: I1213 04:05:18.631592 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-p658l" Dec 13 04:05:20 crc kubenswrapper[5070]: I1213 
04:05:20.151034 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-p658l" Dec 13 04:05:21 crc kubenswrapper[5070]: I1213 04:05:21.242493 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-p658l"] Dec 13 04:05:22 crc kubenswrapper[5070]: I1213 04:05:22.088260 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-p658l" podUID="791a319b-d49f-46c0-ad64-4fbc3ff12524" containerName="registry-server" containerID="cri-o://2fac68c3583ff75b9ddb21d2f51be11ae93a3140dfa2ae4172ddd1791e66e9fc" gracePeriod=2 Dec 13 04:05:22 crc kubenswrapper[5070]: I1213 04:05:22.974626 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-w26qp" Dec 13 04:05:22 crc kubenswrapper[5070]: I1213 04:05:22.975904 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-w26qp" Dec 13 04:05:23 crc kubenswrapper[5070]: I1213 04:05:23.019385 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-w26qp" Dec 13 04:05:23 crc kubenswrapper[5070]: I1213 04:05:23.048952 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-p658l" Dec 13 04:05:23 crc kubenswrapper[5070]: I1213 04:05:23.102361 5070 generic.go:334] "Generic (PLEG): container finished" podID="791a319b-d49f-46c0-ad64-4fbc3ff12524" containerID="2fac68c3583ff75b9ddb21d2f51be11ae93a3140dfa2ae4172ddd1791e66e9fc" exitCode=0 Dec 13 04:05:23 crc kubenswrapper[5070]: I1213 04:05:23.102471 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-p658l" Dec 13 04:05:23 crc kubenswrapper[5070]: I1213 04:05:23.102522 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-p658l" event={"ID":"791a319b-d49f-46c0-ad64-4fbc3ff12524","Type":"ContainerDied","Data":"2fac68c3583ff75b9ddb21d2f51be11ae93a3140dfa2ae4172ddd1791e66e9fc"} Dec 13 04:05:23 crc kubenswrapper[5070]: I1213 04:05:23.102564 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-p658l" event={"ID":"791a319b-d49f-46c0-ad64-4fbc3ff12524","Type":"ContainerDied","Data":"dc33813435ad8cdc86468eadfab13a5e2e0cbf2d809bfa0c662f8c5d7bc9a550"} Dec 13 04:05:23 crc kubenswrapper[5070]: I1213 04:05:23.102586 5070 scope.go:117] "RemoveContainer" containerID="2fac68c3583ff75b9ddb21d2f51be11ae93a3140dfa2ae4172ddd1791e66e9fc" Dec 13 04:05:23 crc kubenswrapper[5070]: I1213 04:05:23.127759 5070 scope.go:117] "RemoveContainer" containerID="ef8d533d91378fbc000c2b10b5ba4d12a5f4cfc513e3f9281db419dc8a7f286b" Dec 13 04:05:23 crc kubenswrapper[5070]: I1213 04:05:23.130301 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pvlzs\" (UniqueName: \"kubernetes.io/projected/791a319b-d49f-46c0-ad64-4fbc3ff12524-kube-api-access-pvlzs\") pod \"791a319b-d49f-46c0-ad64-4fbc3ff12524\" (UID: \"791a319b-d49f-46c0-ad64-4fbc3ff12524\") " Dec 13 04:05:23 crc kubenswrapper[5070]: I1213 04:05:23.130718 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/791a319b-d49f-46c0-ad64-4fbc3ff12524-utilities\") pod \"791a319b-d49f-46c0-ad64-4fbc3ff12524\" (UID: \"791a319b-d49f-46c0-ad64-4fbc3ff12524\") " Dec 13 04:05:23 crc kubenswrapper[5070]: I1213 04:05:23.130757 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/791a319b-d49f-46c0-ad64-4fbc3ff12524-catalog-content\") pod \"791a319b-d49f-46c0-ad64-4fbc3ff12524\" (UID: \"791a319b-d49f-46c0-ad64-4fbc3ff12524\") " Dec 13 04:05:23 crc kubenswrapper[5070]: I1213 04:05:23.136900 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/791a319b-d49f-46c0-ad64-4fbc3ff12524-kube-api-access-pvlzs" (OuterVolumeSpecName: "kube-api-access-pvlzs") pod "791a319b-d49f-46c0-ad64-4fbc3ff12524" (UID: "791a319b-d49f-46c0-ad64-4fbc3ff12524"). InnerVolumeSpecName "kube-api-access-pvlzs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 04:05:23 crc kubenswrapper[5070]: I1213 04:05:23.143392 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/791a319b-d49f-46c0-ad64-4fbc3ff12524-utilities" (OuterVolumeSpecName: "utilities") pod "791a319b-d49f-46c0-ad64-4fbc3ff12524" (UID: "791a319b-d49f-46c0-ad64-4fbc3ff12524"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 04:05:23 crc kubenswrapper[5070]: I1213 04:05:23.159733 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/791a319b-d49f-46c0-ad64-4fbc3ff12524-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "791a319b-d49f-46c0-ad64-4fbc3ff12524" (UID: "791a319b-d49f-46c0-ad64-4fbc3ff12524"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 04:05:23 crc kubenswrapper[5070]: I1213 04:05:23.162721 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-w26qp" Dec 13 04:05:23 crc kubenswrapper[5070]: I1213 04:05:23.204133 5070 scope.go:117] "RemoveContainer" containerID="6ae442f026a548fc109d45c21381895a2d8078471445e8c48d21c6ef9fcd88ab" Dec 13 04:05:23 crc kubenswrapper[5070]: I1213 04:05:23.234726 5070 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/791a319b-d49f-46c0-ad64-4fbc3ff12524-utilities\") on node \"crc\" DevicePath \"\"" Dec 13 04:05:23 crc kubenswrapper[5070]: I1213 04:05:23.234753 5070 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/791a319b-d49f-46c0-ad64-4fbc3ff12524-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 13 04:05:23 crc kubenswrapper[5070]: I1213 04:05:23.234765 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pvlzs\" (UniqueName: \"kubernetes.io/projected/791a319b-d49f-46c0-ad64-4fbc3ff12524-kube-api-access-pvlzs\") on node \"crc\" DevicePath \"\"" Dec 13 04:05:23 crc kubenswrapper[5070]: I1213 04:05:23.249159 5070 scope.go:117] "RemoveContainer" containerID="2fac68c3583ff75b9ddb21d2f51be11ae93a3140dfa2ae4172ddd1791e66e9fc" Dec 13 04:05:23 crc kubenswrapper[5070]: E1213 04:05:23.249585 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2fac68c3583ff75b9ddb21d2f51be11ae93a3140dfa2ae4172ddd1791e66e9fc\": container with ID starting with 2fac68c3583ff75b9ddb21d2f51be11ae93a3140dfa2ae4172ddd1791e66e9fc not found: ID does not exist" containerID="2fac68c3583ff75b9ddb21d2f51be11ae93a3140dfa2ae4172ddd1791e66e9fc" Dec 13 04:05:23 crc kubenswrapper[5070]: I1213 04:05:23.249618 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2fac68c3583ff75b9ddb21d2f51be11ae93a3140dfa2ae4172ddd1791e66e9fc"} err="failed to get container status \"2fac68c3583ff75b9ddb21d2f51be11ae93a3140dfa2ae4172ddd1791e66e9fc\": rpc error: code = NotFound desc = could not find container \"2fac68c3583ff75b9ddb21d2f51be11ae93a3140dfa2ae4172ddd1791e66e9fc\": container with ID starting with 2fac68c3583ff75b9ddb21d2f51be11ae93a3140dfa2ae4172ddd1791e66e9fc not found: ID does not exist" Dec 13 04:05:23 crc kubenswrapper[5070]: I1213 04:05:23.249644 5070 scope.go:117] "RemoveContainer" containerID="ef8d533d91378fbc000c2b10b5ba4d12a5f4cfc513e3f9281db419dc8a7f286b" Dec 13 04:05:23 crc kubenswrapper[5070]: E1213 04:05:23.250130 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ef8d533d91378fbc000c2b10b5ba4d12a5f4cfc513e3f9281db419dc8a7f286b\": container with ID starting with ef8d533d91378fbc000c2b10b5ba4d12a5f4cfc513e3f9281db419dc8a7f286b not found: ID does not exist" containerID="ef8d533d91378fbc000c2b10b5ba4d12a5f4cfc513e3f9281db419dc8a7f286b" Dec 13 04:05:23 crc kubenswrapper[5070]: I1213 04:05:23.250157 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ef8d533d91378fbc000c2b10b5ba4d12a5f4cfc513e3f9281db419dc8a7f286b"} err="failed to get container status \"ef8d533d91378fbc000c2b10b5ba4d12a5f4cfc513e3f9281db419dc8a7f286b\": rpc error: code = NotFound desc = could not find container 
\"ef8d533d91378fbc000c2b10b5ba4d12a5f4cfc513e3f9281db419dc8a7f286b\": container with ID starting with ef8d533d91378fbc000c2b10b5ba4d12a5f4cfc513e3f9281db419dc8a7f286b not found: ID does not exist" Dec 13 04:05:23 crc kubenswrapper[5070]: I1213 04:05:23.250170 5070 scope.go:117] "RemoveContainer" containerID="6ae442f026a548fc109d45c21381895a2d8078471445e8c48d21c6ef9fcd88ab" Dec 13 04:05:23 crc kubenswrapper[5070]: E1213 04:05:23.250427 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6ae442f026a548fc109d45c21381895a2d8078471445e8c48d21c6ef9fcd88ab\": container with ID starting with 6ae442f026a548fc109d45c21381895a2d8078471445e8c48d21c6ef9fcd88ab not found: ID does not exist" containerID="6ae442f026a548fc109d45c21381895a2d8078471445e8c48d21c6ef9fcd88ab" Dec 13 04:05:23 crc kubenswrapper[5070]: I1213 04:05:23.250460 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6ae442f026a548fc109d45c21381895a2d8078471445e8c48d21c6ef9fcd88ab"} err="failed to get container status \"6ae442f026a548fc109d45c21381895a2d8078471445e8c48d21c6ef9fcd88ab\": rpc error: code = NotFound desc = could not find container \"6ae442f026a548fc109d45c21381895a2d8078471445e8c48d21c6ef9fcd88ab\": container with ID starting with 6ae442f026a548fc109d45c21381895a2d8078471445e8c48d21c6ef9fcd88ab not found: ID does not exist" Dec 13 04:05:23 crc kubenswrapper[5070]: I1213 04:05:23.453620 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-p658l"] Dec 13 04:05:23 crc kubenswrapper[5070]: I1213 04:05:23.461733 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-p658l"] Dec 13 04:05:24 crc kubenswrapper[5070]: I1213 04:05:24.056537 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-w26qp"] Dec 13 04:05:24 crc kubenswrapper[5070]: I1213 04:05:24.179357 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="791a319b-d49f-46c0-ad64-4fbc3ff12524" path="/var/lib/kubelet/pods/791a319b-d49f-46c0-ad64-4fbc3ff12524/volumes" Dec 13 04:05:25 crc kubenswrapper[5070]: I1213 04:05:25.124575 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-w26qp" podUID="1d76cdfb-92e1-4f64-af32-0e44eafb9588" containerName="registry-server" containerID="cri-o://eaaacd5927c229b981f576587f7ca57c18b3878b2f3172964e09ce0ed95b8519" gracePeriod=2 Dec 13 04:05:25 crc kubenswrapper[5070]: I1213 04:05:25.618274 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-w26qp" Dec 13 04:05:25 crc kubenswrapper[5070]: I1213 04:05:25.684606 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d76cdfb-92e1-4f64-af32-0e44eafb9588-catalog-content\") pod \"1d76cdfb-92e1-4f64-af32-0e44eafb9588\" (UID: \"1d76cdfb-92e1-4f64-af32-0e44eafb9588\") " Dec 13 04:05:25 crc kubenswrapper[5070]: I1213 04:05:25.684756 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k6dxl\" (UniqueName: \"kubernetes.io/projected/1d76cdfb-92e1-4f64-af32-0e44eafb9588-kube-api-access-k6dxl\") pod \"1d76cdfb-92e1-4f64-af32-0e44eafb9588\" (UID: \"1d76cdfb-92e1-4f64-af32-0e44eafb9588\") " Dec 13 04:05:25 crc kubenswrapper[5070]: I1213 04:05:25.684819 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d76cdfb-92e1-4f64-af32-0e44eafb9588-utilities\") pod \"1d76cdfb-92e1-4f64-af32-0e44eafb9588\" (UID: \"1d76cdfb-92e1-4f64-af32-0e44eafb9588\") " Dec 13 04:05:25 crc kubenswrapper[5070]: I1213 04:05:25.687723 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d76cdfb-92e1-4f64-af32-0e44eafb9588-utilities" (OuterVolumeSpecName: "utilities") pod "1d76cdfb-92e1-4f64-af32-0e44eafb9588" (UID: "1d76cdfb-92e1-4f64-af32-0e44eafb9588"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 04:05:25 crc kubenswrapper[5070]: I1213 04:05:25.691660 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d76cdfb-92e1-4f64-af32-0e44eafb9588-kube-api-access-k6dxl" (OuterVolumeSpecName: "kube-api-access-k6dxl") pod "1d76cdfb-92e1-4f64-af32-0e44eafb9588" (UID: "1d76cdfb-92e1-4f64-af32-0e44eafb9588"). InnerVolumeSpecName "kube-api-access-k6dxl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 04:05:25 crc kubenswrapper[5070]: I1213 04:05:25.750164 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d76cdfb-92e1-4f64-af32-0e44eafb9588-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d76cdfb-92e1-4f64-af32-0e44eafb9588" (UID: "1d76cdfb-92e1-4f64-af32-0e44eafb9588"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 04:05:25 crc kubenswrapper[5070]: I1213 04:05:25.787771 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k6dxl\" (UniqueName: \"kubernetes.io/projected/1d76cdfb-92e1-4f64-af32-0e44eafb9588-kube-api-access-k6dxl\") on node \"crc\" DevicePath \"\"" Dec 13 04:05:25 crc kubenswrapper[5070]: I1213 04:05:25.787816 5070 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d76cdfb-92e1-4f64-af32-0e44eafb9588-utilities\") on node \"crc\" DevicePath \"\"" Dec 13 04:05:25 crc kubenswrapper[5070]: I1213 04:05:25.787825 5070 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d76cdfb-92e1-4f64-af32-0e44eafb9588-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 13 04:05:26 crc kubenswrapper[5070]: I1213 04:05:26.135093 5070 generic.go:334] "Generic (PLEG): container finished" podID="1d76cdfb-92e1-4f64-af32-0e44eafb9588" containerID="eaaacd5927c229b981f576587f7ca57c18b3878b2f3172964e09ce0ed95b8519" exitCode=0 Dec 13 04:05:26 crc kubenswrapper[5070]: I1213 04:05:26.135163 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-w26qp" Dec 13 04:05:26 crc kubenswrapper[5070]: I1213 04:05:26.135160 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-w26qp" event={"ID":"1d76cdfb-92e1-4f64-af32-0e44eafb9588","Type":"ContainerDied","Data":"eaaacd5927c229b981f576587f7ca57c18b3878b2f3172964e09ce0ed95b8519"} Dec 13 04:05:26 crc kubenswrapper[5070]: I1213 04:05:26.135454 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-w26qp" event={"ID":"1d76cdfb-92e1-4f64-af32-0e44eafb9588","Type":"ContainerDied","Data":"d83f0deddd5db76aaf8041b3acd2096483b883cd76923a313e3813f176ecb80f"} Dec 13 04:05:26 crc kubenswrapper[5070]: I1213 04:05:26.135479 5070 scope.go:117] "RemoveContainer" containerID="eaaacd5927c229b981f576587f7ca57c18b3878b2f3172964e09ce0ed95b8519" Dec 13 04:05:26 crc kubenswrapper[5070]: I1213 04:05:26.154220 5070 scope.go:117] "RemoveContainer" containerID="b6f4cc41b2327761f5065d7bd104cb2bc81343e4e4f01f77fcfa55ee36a9d8d4" Dec 13 04:05:26 crc kubenswrapper[5070]: I1213 04:05:26.171491 5070 scope.go:117] "RemoveContainer" containerID="9efe948923eb00e6c9ce648b3696834a4b2264ea119eca5d1fc7e464ff7d2298" Dec 13 04:05:26 crc kubenswrapper[5070]: E1213 04:05:26.171787 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 04:05:26 crc kubenswrapper[5070]: I1213 04:05:26.183515 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-w26qp"] Dec 13 04:05:26 crc kubenswrapper[5070]: I1213 04:05:26.193058 5070 scope.go:117] "RemoveContainer" containerID="153fde5078a45391a0aa69749df9b54ef43b1b6e7a5a60c1db1cd2fc6a841af3" Dec 13 04:05:26 crc kubenswrapper[5070]: I1213 04:05:26.198499 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-w26qp"] Dec 13 04:05:26 crc 
kubenswrapper[5070]: I1213 04:05:26.241987 5070 scope.go:117] "RemoveContainer" containerID="eaaacd5927c229b981f576587f7ca57c18b3878b2f3172964e09ce0ed95b8519" Dec 13 04:05:26 crc kubenswrapper[5070]: E1213 04:05:26.242365 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eaaacd5927c229b981f576587f7ca57c18b3878b2f3172964e09ce0ed95b8519\": container with ID starting with eaaacd5927c229b981f576587f7ca57c18b3878b2f3172964e09ce0ed95b8519 not found: ID does not exist" containerID="eaaacd5927c229b981f576587f7ca57c18b3878b2f3172964e09ce0ed95b8519" Dec 13 04:05:26 crc kubenswrapper[5070]: I1213 04:05:26.242393 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eaaacd5927c229b981f576587f7ca57c18b3878b2f3172964e09ce0ed95b8519"} err="failed to get container status \"eaaacd5927c229b981f576587f7ca57c18b3878b2f3172964e09ce0ed95b8519\": rpc error: code = NotFound desc = could not find container \"eaaacd5927c229b981f576587f7ca57c18b3878b2f3172964e09ce0ed95b8519\": container with ID starting with eaaacd5927c229b981f576587f7ca57c18b3878b2f3172964e09ce0ed95b8519 not found: ID does not exist" Dec 13 04:05:26 crc kubenswrapper[5070]: I1213 04:05:26.242418 5070 scope.go:117] "RemoveContainer" containerID="b6f4cc41b2327761f5065d7bd104cb2bc81343e4e4f01f77fcfa55ee36a9d8d4" Dec 13 04:05:26 crc kubenswrapper[5070]: E1213 04:05:26.243084 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b6f4cc41b2327761f5065d7bd104cb2bc81343e4e4f01f77fcfa55ee36a9d8d4\": container with ID starting with b6f4cc41b2327761f5065d7bd104cb2bc81343e4e4f01f77fcfa55ee36a9d8d4 not found: ID does not exist" containerID="b6f4cc41b2327761f5065d7bd104cb2bc81343e4e4f01f77fcfa55ee36a9d8d4" Dec 13 04:05:26 crc kubenswrapper[5070]: I1213 04:05:26.243134 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b6f4cc41b2327761f5065d7bd104cb2bc81343e4e4f01f77fcfa55ee36a9d8d4"} err="failed to get container status \"b6f4cc41b2327761f5065d7bd104cb2bc81343e4e4f01f77fcfa55ee36a9d8d4\": rpc error: code = NotFound desc = could not find container \"b6f4cc41b2327761f5065d7bd104cb2bc81343e4e4f01f77fcfa55ee36a9d8d4\": container with ID starting with b6f4cc41b2327761f5065d7bd104cb2bc81343e4e4f01f77fcfa55ee36a9d8d4 not found: ID does not exist" Dec 13 04:05:26 crc kubenswrapper[5070]: I1213 04:05:26.243168 5070 scope.go:117] "RemoveContainer" containerID="153fde5078a45391a0aa69749df9b54ef43b1b6e7a5a60c1db1cd2fc6a841af3" Dec 13 04:05:26 crc kubenswrapper[5070]: E1213 04:05:26.243521 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"153fde5078a45391a0aa69749df9b54ef43b1b6e7a5a60c1db1cd2fc6a841af3\": container with ID starting with 153fde5078a45391a0aa69749df9b54ef43b1b6e7a5a60c1db1cd2fc6a841af3 not found: ID does not exist" containerID="153fde5078a45391a0aa69749df9b54ef43b1b6e7a5a60c1db1cd2fc6a841af3" Dec 13 04:05:26 crc kubenswrapper[5070]: I1213 04:05:26.243558 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"153fde5078a45391a0aa69749df9b54ef43b1b6e7a5a60c1db1cd2fc6a841af3"} err="failed to get container status \"153fde5078a45391a0aa69749df9b54ef43b1b6e7a5a60c1db1cd2fc6a841af3\": rpc error: code = NotFound desc = could not find container 
\"153fde5078a45391a0aa69749df9b54ef43b1b6e7a5a60c1db1cd2fc6a841af3\": container with ID starting with 153fde5078a45391a0aa69749df9b54ef43b1b6e7a5a60c1db1cd2fc6a841af3 not found: ID does not exist" Dec 13 04:05:28 crc kubenswrapper[5070]: I1213 04:05:28.177144 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d76cdfb-92e1-4f64-af32-0e44eafb9588" path="/var/lib/kubelet/pods/1d76cdfb-92e1-4f64-af32-0e44eafb9588/volumes" Dec 13 04:05:30 crc kubenswrapper[5070]: I1213 04:05:30.398196 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-env-vars-s0" Dec 13 04:05:32 crc kubenswrapper[5070]: I1213 04:05:32.197710 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"7414f018-4f89-4d33-a19a-af5e996ba16b","Type":"ContainerStarted","Data":"0f5c088cb0c5752ab3827eb14e29cb4b8aee7f15fb455b63fa7f235191d6cdf2"} Dec 13 04:05:32 crc kubenswrapper[5070]: I1213 04:05:32.219306 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/tempest-tests-tempest" podStartSLOduration=3.832578294 podStartE2EDuration="49.219281015s" podCreationTimestamp="2025-12-13 04:04:43 +0000 UTC" firstStartedPulling="2025-12-13 04:04:45.007580396 +0000 UTC m=+3177.243423942" lastFinishedPulling="2025-12-13 04:05:30.394283117 +0000 UTC m=+3222.630126663" observedRunningTime="2025-12-13 04:05:32.215194124 +0000 UTC m=+3224.451037670" watchObservedRunningTime="2025-12-13 04:05:32.219281015 +0000 UTC m=+3224.455124571" Dec 13 04:05:40 crc kubenswrapper[5070]: I1213 04:05:40.167771 5070 scope.go:117] "RemoveContainer" containerID="9efe948923eb00e6c9ce648b3696834a4b2264ea119eca5d1fc7e464ff7d2298" Dec 13 04:05:40 crc kubenswrapper[5070]: E1213 04:05:40.169198 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 04:05:55 crc kubenswrapper[5070]: I1213 04:05:55.167467 5070 scope.go:117] "RemoveContainer" containerID="9efe948923eb00e6c9ce648b3696834a4b2264ea119eca5d1fc7e464ff7d2298" Dec 13 04:05:55 crc kubenswrapper[5070]: E1213 04:05:55.168372 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 04:06:09 crc kubenswrapper[5070]: I1213 04:06:09.167552 5070 scope.go:117] "RemoveContainer" containerID="9efe948923eb00e6c9ce648b3696834a4b2264ea119eca5d1fc7e464ff7d2298" Dec 13 04:06:09 crc kubenswrapper[5070]: E1213 04:06:09.168362 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" 
podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 04:06:21 crc kubenswrapper[5070]: I1213 04:06:21.167332 5070 scope.go:117] "RemoveContainer" containerID="9efe948923eb00e6c9ce648b3696834a4b2264ea119eca5d1fc7e464ff7d2298" Dec 13 04:06:21 crc kubenswrapper[5070]: E1213 04:06:21.168305 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 04:06:36 crc kubenswrapper[5070]: I1213 04:06:36.168848 5070 scope.go:117] "RemoveContainer" containerID="9efe948923eb00e6c9ce648b3696834a4b2264ea119eca5d1fc7e464ff7d2298" Dec 13 04:06:36 crc kubenswrapper[5070]: E1213 04:06:36.169728 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 04:06:50 crc kubenswrapper[5070]: I1213 04:06:50.167308 5070 scope.go:117] "RemoveContainer" containerID="9efe948923eb00e6c9ce648b3696834a4b2264ea119eca5d1fc7e464ff7d2298" Dec 13 04:06:50 crc kubenswrapper[5070]: E1213 04:06:50.168369 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 04:07:03 crc kubenswrapper[5070]: I1213 04:07:03.167093 5070 scope.go:117] "RemoveContainer" containerID="9efe948923eb00e6c9ce648b3696834a4b2264ea119eca5d1fc7e464ff7d2298" Dec 13 04:07:03 crc kubenswrapper[5070]: E1213 04:07:03.167963 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 04:07:17 crc kubenswrapper[5070]: I1213 04:07:17.167675 5070 scope.go:117] "RemoveContainer" containerID="9efe948923eb00e6c9ce648b3696834a4b2264ea119eca5d1fc7e464ff7d2298" Dec 13 04:07:17 crc kubenswrapper[5070]: E1213 04:07:17.168554 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 04:07:31 crc kubenswrapper[5070]: I1213 04:07:31.168129 5070 scope.go:117] "RemoveContainer" 
containerID="9efe948923eb00e6c9ce648b3696834a4b2264ea119eca5d1fc7e464ff7d2298" Dec 13 04:07:31 crc kubenswrapper[5070]: E1213 04:07:31.169022 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 04:07:42 crc kubenswrapper[5070]: I1213 04:07:42.167396 5070 scope.go:117] "RemoveContainer" containerID="9efe948923eb00e6c9ce648b3696834a4b2264ea119eca5d1fc7e464ff7d2298" Dec 13 04:07:42 crc kubenswrapper[5070]: E1213 04:07:42.168377 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 04:07:55 crc kubenswrapper[5070]: I1213 04:07:55.166917 5070 scope.go:117] "RemoveContainer" containerID="9efe948923eb00e6c9ce648b3696834a4b2264ea119eca5d1fc7e464ff7d2298" Dec 13 04:07:55 crc kubenswrapper[5070]: E1213 04:07:55.167523 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 04:08:03 crc kubenswrapper[5070]: I1213 04:08:03.600795 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-hhw2g"] Dec 13 04:08:03 crc kubenswrapper[5070]: E1213 04:08:03.602030 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="791a319b-d49f-46c0-ad64-4fbc3ff12524" containerName="extract-content" Dec 13 04:08:03 crc kubenswrapper[5070]: I1213 04:08:03.602047 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="791a319b-d49f-46c0-ad64-4fbc3ff12524" containerName="extract-content" Dec 13 04:08:03 crc kubenswrapper[5070]: E1213 04:08:03.602094 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="791a319b-d49f-46c0-ad64-4fbc3ff12524" containerName="extract-utilities" Dec 13 04:08:03 crc kubenswrapper[5070]: I1213 04:08:03.602103 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="791a319b-d49f-46c0-ad64-4fbc3ff12524" containerName="extract-utilities" Dec 13 04:08:03 crc kubenswrapper[5070]: E1213 04:08:03.602120 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1d76cdfb-92e1-4f64-af32-0e44eafb9588" containerName="registry-server" Dec 13 04:08:03 crc kubenswrapper[5070]: I1213 04:08:03.602137 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="1d76cdfb-92e1-4f64-af32-0e44eafb9588" containerName="registry-server" Dec 13 04:08:03 crc kubenswrapper[5070]: E1213 04:08:03.602217 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1d76cdfb-92e1-4f64-af32-0e44eafb9588" containerName="extract-content" Dec 13 04:08:03 crc kubenswrapper[5070]: I1213 04:08:03.602226 5070 
state_mem.go:107] "Deleted CPUSet assignment" podUID="1d76cdfb-92e1-4f64-af32-0e44eafb9588" containerName="extract-content" Dec 13 04:08:03 crc kubenswrapper[5070]: E1213 04:08:03.602292 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1d76cdfb-92e1-4f64-af32-0e44eafb9588" containerName="extract-utilities" Dec 13 04:08:03 crc kubenswrapper[5070]: I1213 04:08:03.602301 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="1d76cdfb-92e1-4f64-af32-0e44eafb9588" containerName="extract-utilities" Dec 13 04:08:03 crc kubenswrapper[5070]: E1213 04:08:03.602316 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="791a319b-d49f-46c0-ad64-4fbc3ff12524" containerName="registry-server" Dec 13 04:08:03 crc kubenswrapper[5070]: I1213 04:08:03.602326 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="791a319b-d49f-46c0-ad64-4fbc3ff12524" containerName="registry-server" Dec 13 04:08:03 crc kubenswrapper[5070]: I1213 04:08:03.602922 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="1d76cdfb-92e1-4f64-af32-0e44eafb9588" containerName="registry-server" Dec 13 04:08:03 crc kubenswrapper[5070]: I1213 04:08:03.602974 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="791a319b-d49f-46c0-ad64-4fbc3ff12524" containerName="registry-server" Dec 13 04:08:03 crc kubenswrapper[5070]: I1213 04:08:03.604629 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-hhw2g" Dec 13 04:08:03 crc kubenswrapper[5070]: I1213 04:08:03.617142 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-hhw2g"] Dec 13 04:08:03 crc kubenswrapper[5070]: I1213 04:08:03.780665 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cb5210b7-3d04-4595-b69a-404b02beab71-catalog-content\") pod \"redhat-operators-hhw2g\" (UID: \"cb5210b7-3d04-4595-b69a-404b02beab71\") " pod="openshift-marketplace/redhat-operators-hhw2g" Dec 13 04:08:03 crc kubenswrapper[5070]: I1213 04:08:03.780723 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cb5210b7-3d04-4595-b69a-404b02beab71-utilities\") pod \"redhat-operators-hhw2g\" (UID: \"cb5210b7-3d04-4595-b69a-404b02beab71\") " pod="openshift-marketplace/redhat-operators-hhw2g" Dec 13 04:08:03 crc kubenswrapper[5070]: I1213 04:08:03.781105 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l9gph\" (UniqueName: \"kubernetes.io/projected/cb5210b7-3d04-4595-b69a-404b02beab71-kube-api-access-l9gph\") pod \"redhat-operators-hhw2g\" (UID: \"cb5210b7-3d04-4595-b69a-404b02beab71\") " pod="openshift-marketplace/redhat-operators-hhw2g" Dec 13 04:08:03 crc kubenswrapper[5070]: I1213 04:08:03.882852 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l9gph\" (UniqueName: \"kubernetes.io/projected/cb5210b7-3d04-4595-b69a-404b02beab71-kube-api-access-l9gph\") pod \"redhat-operators-hhw2g\" (UID: \"cb5210b7-3d04-4595-b69a-404b02beab71\") " pod="openshift-marketplace/redhat-operators-hhw2g" Dec 13 04:08:03 crc kubenswrapper[5070]: I1213 04:08:03.883214 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/cb5210b7-3d04-4595-b69a-404b02beab71-catalog-content\") pod \"redhat-operators-hhw2g\" (UID: \"cb5210b7-3d04-4595-b69a-404b02beab71\") " pod="openshift-marketplace/redhat-operators-hhw2g" Dec 13 04:08:03 crc kubenswrapper[5070]: I1213 04:08:03.883236 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cb5210b7-3d04-4595-b69a-404b02beab71-utilities\") pod \"redhat-operators-hhw2g\" (UID: \"cb5210b7-3d04-4595-b69a-404b02beab71\") " pod="openshift-marketplace/redhat-operators-hhw2g" Dec 13 04:08:03 crc kubenswrapper[5070]: I1213 04:08:03.883923 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cb5210b7-3d04-4595-b69a-404b02beab71-utilities\") pod \"redhat-operators-hhw2g\" (UID: \"cb5210b7-3d04-4595-b69a-404b02beab71\") " pod="openshift-marketplace/redhat-operators-hhw2g" Dec 13 04:08:03 crc kubenswrapper[5070]: I1213 04:08:03.884098 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cb5210b7-3d04-4595-b69a-404b02beab71-catalog-content\") pod \"redhat-operators-hhw2g\" (UID: \"cb5210b7-3d04-4595-b69a-404b02beab71\") " pod="openshift-marketplace/redhat-operators-hhw2g" Dec 13 04:08:03 crc kubenswrapper[5070]: I1213 04:08:03.906211 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l9gph\" (UniqueName: \"kubernetes.io/projected/cb5210b7-3d04-4595-b69a-404b02beab71-kube-api-access-l9gph\") pod \"redhat-operators-hhw2g\" (UID: \"cb5210b7-3d04-4595-b69a-404b02beab71\") " pod="openshift-marketplace/redhat-operators-hhw2g" Dec 13 04:08:03 crc kubenswrapper[5070]: I1213 04:08:03.990989 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-hhw2g" Dec 13 04:08:04 crc kubenswrapper[5070]: I1213 04:08:04.470835 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-hhw2g"] Dec 13 04:08:04 crc kubenswrapper[5070]: I1213 04:08:04.806150 5070 generic.go:334] "Generic (PLEG): container finished" podID="cb5210b7-3d04-4595-b69a-404b02beab71" containerID="39c027c3801832aebce235fe8031e00c35f704787770f65c8630bf29a37c6087" exitCode=0 Dec 13 04:08:04 crc kubenswrapper[5070]: I1213 04:08:04.806358 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hhw2g" event={"ID":"cb5210b7-3d04-4595-b69a-404b02beab71","Type":"ContainerDied","Data":"39c027c3801832aebce235fe8031e00c35f704787770f65c8630bf29a37c6087"} Dec 13 04:08:04 crc kubenswrapper[5070]: I1213 04:08:04.806422 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hhw2g" event={"ID":"cb5210b7-3d04-4595-b69a-404b02beab71","Type":"ContainerStarted","Data":"261ed69c4cdf661a262d71650626ba48048c0bddf89c2a995d3d452e44b93b0c"} Dec 13 04:08:04 crc kubenswrapper[5070]: I1213 04:08:04.808065 5070 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 13 04:08:06 crc kubenswrapper[5070]: I1213 04:08:06.848343 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hhw2g" event={"ID":"cb5210b7-3d04-4595-b69a-404b02beab71","Type":"ContainerStarted","Data":"1adabc0956fe1014aa716a18b6d7dc118e9dd46653e78124c16ffaaf0dae57d4"} Dec 13 04:08:07 crc kubenswrapper[5070]: I1213 04:08:07.870298 5070 generic.go:334] "Generic (PLEG): container finished" podID="cb5210b7-3d04-4595-b69a-404b02beab71" containerID="1adabc0956fe1014aa716a18b6d7dc118e9dd46653e78124c16ffaaf0dae57d4" exitCode=0 Dec 13 04:08:07 crc kubenswrapper[5070]: I1213 04:08:07.870349 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hhw2g" event={"ID":"cb5210b7-3d04-4595-b69a-404b02beab71","Type":"ContainerDied","Data":"1adabc0956fe1014aa716a18b6d7dc118e9dd46653e78124c16ffaaf0dae57d4"} Dec 13 04:08:09 crc kubenswrapper[5070]: I1213 04:08:09.891364 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hhw2g" event={"ID":"cb5210b7-3d04-4595-b69a-404b02beab71","Type":"ContainerStarted","Data":"c64e86c55c8a94ff7d03280d9001bb341298a849b61562d0024f283b53a98572"} Dec 13 04:08:09 crc kubenswrapper[5070]: I1213 04:08:09.914881 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-hhw2g" podStartSLOduration=3.391337621 podStartE2EDuration="6.914863757s" podCreationTimestamp="2025-12-13 04:08:03 +0000 UTC" firstStartedPulling="2025-12-13 04:08:04.807851712 +0000 UTC m=+3377.043695258" lastFinishedPulling="2025-12-13 04:08:08.331377808 +0000 UTC m=+3380.567221394" observedRunningTime="2025-12-13 04:08:09.910387304 +0000 UTC m=+3382.146230860" watchObservedRunningTime="2025-12-13 04:08:09.914863757 +0000 UTC m=+3382.150707293" Dec 13 04:08:10 crc kubenswrapper[5070]: I1213 04:08:10.168138 5070 scope.go:117] "RemoveContainer" containerID="9efe948923eb00e6c9ce648b3696834a4b2264ea119eca5d1fc7e464ff7d2298" Dec 13 04:08:10 crc kubenswrapper[5070]: E1213 04:08:10.168615 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s 
restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 04:08:13 crc kubenswrapper[5070]: I1213 04:08:13.991472 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-hhw2g" Dec 13 04:08:13 crc kubenswrapper[5070]: I1213 04:08:13.992013 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-hhw2g" Dec 13 04:08:15 crc kubenswrapper[5070]: I1213 04:08:15.037396 5070 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-hhw2g" podUID="cb5210b7-3d04-4595-b69a-404b02beab71" containerName="registry-server" probeResult="failure" output=< Dec 13 04:08:15 crc kubenswrapper[5070]: timeout: failed to connect service ":50051" within 1s Dec 13 04:08:15 crc kubenswrapper[5070]: > Dec 13 04:08:24 crc kubenswrapper[5070]: I1213 04:08:24.057929 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-hhw2g" Dec 13 04:08:24 crc kubenswrapper[5070]: I1213 04:08:24.116887 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-hhw2g" Dec 13 04:08:24 crc kubenswrapper[5070]: I1213 04:08:24.167117 5070 scope.go:117] "RemoveContainer" containerID="9efe948923eb00e6c9ce648b3696834a4b2264ea119eca5d1fc7e464ff7d2298" Dec 13 04:08:24 crc kubenswrapper[5070]: E1213 04:08:24.167464 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 04:08:24 crc kubenswrapper[5070]: I1213 04:08:24.311278 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-hhw2g"] Dec 13 04:08:26 crc kubenswrapper[5070]: I1213 04:08:26.064887 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-hhw2g" podUID="cb5210b7-3d04-4595-b69a-404b02beab71" containerName="registry-server" containerID="cri-o://c64e86c55c8a94ff7d03280d9001bb341298a849b61562d0024f283b53a98572" gracePeriod=2 Dec 13 04:08:26 crc kubenswrapper[5070]: I1213 04:08:26.677169 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-hhw2g" Dec 13 04:08:26 crc kubenswrapper[5070]: I1213 04:08:26.814803 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l9gph\" (UniqueName: \"kubernetes.io/projected/cb5210b7-3d04-4595-b69a-404b02beab71-kube-api-access-l9gph\") pod \"cb5210b7-3d04-4595-b69a-404b02beab71\" (UID: \"cb5210b7-3d04-4595-b69a-404b02beab71\") " Dec 13 04:08:26 crc kubenswrapper[5070]: I1213 04:08:26.814962 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cb5210b7-3d04-4595-b69a-404b02beab71-utilities\") pod \"cb5210b7-3d04-4595-b69a-404b02beab71\" (UID: \"cb5210b7-3d04-4595-b69a-404b02beab71\") " Dec 13 04:08:26 crc kubenswrapper[5070]: I1213 04:08:26.815088 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cb5210b7-3d04-4595-b69a-404b02beab71-catalog-content\") pod \"cb5210b7-3d04-4595-b69a-404b02beab71\" (UID: \"cb5210b7-3d04-4595-b69a-404b02beab71\") " Dec 13 04:08:26 crc kubenswrapper[5070]: I1213 04:08:26.817624 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cb5210b7-3d04-4595-b69a-404b02beab71-utilities" (OuterVolumeSpecName: "utilities") pod "cb5210b7-3d04-4595-b69a-404b02beab71" (UID: "cb5210b7-3d04-4595-b69a-404b02beab71"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 04:08:26 crc kubenswrapper[5070]: I1213 04:08:26.823704 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cb5210b7-3d04-4595-b69a-404b02beab71-kube-api-access-l9gph" (OuterVolumeSpecName: "kube-api-access-l9gph") pod "cb5210b7-3d04-4595-b69a-404b02beab71" (UID: "cb5210b7-3d04-4595-b69a-404b02beab71"). InnerVolumeSpecName "kube-api-access-l9gph". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 04:08:26 crc kubenswrapper[5070]: I1213 04:08:26.917527 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l9gph\" (UniqueName: \"kubernetes.io/projected/cb5210b7-3d04-4595-b69a-404b02beab71-kube-api-access-l9gph\") on node \"crc\" DevicePath \"\"" Dec 13 04:08:26 crc kubenswrapper[5070]: I1213 04:08:26.917572 5070 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cb5210b7-3d04-4595-b69a-404b02beab71-utilities\") on node \"crc\" DevicePath \"\"" Dec 13 04:08:26 crc kubenswrapper[5070]: I1213 04:08:26.957385 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cb5210b7-3d04-4595-b69a-404b02beab71-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "cb5210b7-3d04-4595-b69a-404b02beab71" (UID: "cb5210b7-3d04-4595-b69a-404b02beab71"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 04:08:27 crc kubenswrapper[5070]: I1213 04:08:27.020091 5070 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cb5210b7-3d04-4595-b69a-404b02beab71-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 13 04:08:27 crc kubenswrapper[5070]: I1213 04:08:27.077306 5070 generic.go:334] "Generic (PLEG): container finished" podID="cb5210b7-3d04-4595-b69a-404b02beab71" containerID="c64e86c55c8a94ff7d03280d9001bb341298a849b61562d0024f283b53a98572" exitCode=0 Dec 13 04:08:27 crc kubenswrapper[5070]: I1213 04:08:27.077351 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hhw2g" event={"ID":"cb5210b7-3d04-4595-b69a-404b02beab71","Type":"ContainerDied","Data":"c64e86c55c8a94ff7d03280d9001bb341298a849b61562d0024f283b53a98572"} Dec 13 04:08:27 crc kubenswrapper[5070]: I1213 04:08:27.077383 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hhw2g" event={"ID":"cb5210b7-3d04-4595-b69a-404b02beab71","Type":"ContainerDied","Data":"261ed69c4cdf661a262d71650626ba48048c0bddf89c2a995d3d452e44b93b0c"} Dec 13 04:08:27 crc kubenswrapper[5070]: I1213 04:08:27.077400 5070 scope.go:117] "RemoveContainer" containerID="c64e86c55c8a94ff7d03280d9001bb341298a849b61562d0024f283b53a98572" Dec 13 04:08:27 crc kubenswrapper[5070]: I1213 04:08:27.077428 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-hhw2g" Dec 13 04:08:27 crc kubenswrapper[5070]: I1213 04:08:27.144140 5070 scope.go:117] "RemoveContainer" containerID="1adabc0956fe1014aa716a18b6d7dc118e9dd46653e78124c16ffaaf0dae57d4" Dec 13 04:08:27 crc kubenswrapper[5070]: I1213 04:08:27.147635 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-hhw2g"] Dec 13 04:08:27 crc kubenswrapper[5070]: I1213 04:08:27.164259 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-hhw2g"] Dec 13 04:08:27 crc kubenswrapper[5070]: I1213 04:08:27.173005 5070 scope.go:117] "RemoveContainer" containerID="39c027c3801832aebce235fe8031e00c35f704787770f65c8630bf29a37c6087" Dec 13 04:08:27 crc kubenswrapper[5070]: I1213 04:08:27.233228 5070 scope.go:117] "RemoveContainer" containerID="c64e86c55c8a94ff7d03280d9001bb341298a849b61562d0024f283b53a98572" Dec 13 04:08:27 crc kubenswrapper[5070]: E1213 04:08:27.234137 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c64e86c55c8a94ff7d03280d9001bb341298a849b61562d0024f283b53a98572\": container with ID starting with c64e86c55c8a94ff7d03280d9001bb341298a849b61562d0024f283b53a98572 not found: ID does not exist" containerID="c64e86c55c8a94ff7d03280d9001bb341298a849b61562d0024f283b53a98572" Dec 13 04:08:27 crc kubenswrapper[5070]: I1213 04:08:27.234216 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c64e86c55c8a94ff7d03280d9001bb341298a849b61562d0024f283b53a98572"} err="failed to get container status \"c64e86c55c8a94ff7d03280d9001bb341298a849b61562d0024f283b53a98572\": rpc error: code = NotFound desc = could not find container \"c64e86c55c8a94ff7d03280d9001bb341298a849b61562d0024f283b53a98572\": container with ID starting with c64e86c55c8a94ff7d03280d9001bb341298a849b61562d0024f283b53a98572 not found: ID does not exist" Dec 13 04:08:27 crc 
kubenswrapper[5070]: I1213 04:08:27.234251 5070 scope.go:117] "RemoveContainer" containerID="1adabc0956fe1014aa716a18b6d7dc118e9dd46653e78124c16ffaaf0dae57d4" Dec 13 04:08:27 crc kubenswrapper[5070]: E1213 04:08:27.234830 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1adabc0956fe1014aa716a18b6d7dc118e9dd46653e78124c16ffaaf0dae57d4\": container with ID starting with 1adabc0956fe1014aa716a18b6d7dc118e9dd46653e78124c16ffaaf0dae57d4 not found: ID does not exist" containerID="1adabc0956fe1014aa716a18b6d7dc118e9dd46653e78124c16ffaaf0dae57d4" Dec 13 04:08:27 crc kubenswrapper[5070]: I1213 04:08:27.234863 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1adabc0956fe1014aa716a18b6d7dc118e9dd46653e78124c16ffaaf0dae57d4"} err="failed to get container status \"1adabc0956fe1014aa716a18b6d7dc118e9dd46653e78124c16ffaaf0dae57d4\": rpc error: code = NotFound desc = could not find container \"1adabc0956fe1014aa716a18b6d7dc118e9dd46653e78124c16ffaaf0dae57d4\": container with ID starting with 1adabc0956fe1014aa716a18b6d7dc118e9dd46653e78124c16ffaaf0dae57d4 not found: ID does not exist" Dec 13 04:08:27 crc kubenswrapper[5070]: I1213 04:08:27.234883 5070 scope.go:117] "RemoveContainer" containerID="39c027c3801832aebce235fe8031e00c35f704787770f65c8630bf29a37c6087" Dec 13 04:08:27 crc kubenswrapper[5070]: E1213 04:08:27.235151 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"39c027c3801832aebce235fe8031e00c35f704787770f65c8630bf29a37c6087\": container with ID starting with 39c027c3801832aebce235fe8031e00c35f704787770f65c8630bf29a37c6087 not found: ID does not exist" containerID="39c027c3801832aebce235fe8031e00c35f704787770f65c8630bf29a37c6087" Dec 13 04:08:27 crc kubenswrapper[5070]: I1213 04:08:27.235177 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"39c027c3801832aebce235fe8031e00c35f704787770f65c8630bf29a37c6087"} err="failed to get container status \"39c027c3801832aebce235fe8031e00c35f704787770f65c8630bf29a37c6087\": rpc error: code = NotFound desc = could not find container \"39c027c3801832aebce235fe8031e00c35f704787770f65c8630bf29a37c6087\": container with ID starting with 39c027c3801832aebce235fe8031e00c35f704787770f65c8630bf29a37c6087 not found: ID does not exist" Dec 13 04:08:28 crc kubenswrapper[5070]: I1213 04:08:28.184581 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cb5210b7-3d04-4595-b69a-404b02beab71" path="/var/lib/kubelet/pods/cb5210b7-3d04-4595-b69a-404b02beab71/volumes" Dec 13 04:08:39 crc kubenswrapper[5070]: I1213 04:08:39.166704 5070 scope.go:117] "RemoveContainer" containerID="9efe948923eb00e6c9ce648b3696834a4b2264ea119eca5d1fc7e464ff7d2298" Dec 13 04:08:39 crc kubenswrapper[5070]: E1213 04:08:39.167621 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 04:08:52 crc kubenswrapper[5070]: I1213 04:08:52.166651 5070 scope.go:117] "RemoveContainer" containerID="9efe948923eb00e6c9ce648b3696834a4b2264ea119eca5d1fc7e464ff7d2298" 
Dec 13 04:08:52 crc kubenswrapper[5070]: E1213 04:08:52.167411 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 04:09:07 crc kubenswrapper[5070]: I1213 04:09:07.166907 5070 scope.go:117] "RemoveContainer" containerID="9efe948923eb00e6c9ce648b3696834a4b2264ea119eca5d1fc7e464ff7d2298" Dec 13 04:09:07 crc kubenswrapper[5070]: E1213 04:09:07.167717 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 04:09:19 crc kubenswrapper[5070]: I1213 04:09:19.167851 5070 scope.go:117] "RemoveContainer" containerID="9efe948923eb00e6c9ce648b3696834a4b2264ea119eca5d1fc7e464ff7d2298" Dec 13 04:09:19 crc kubenswrapper[5070]: E1213 04:09:19.168464 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 04:09:31 crc kubenswrapper[5070]: I1213 04:09:31.167004 5070 scope.go:117] "RemoveContainer" containerID="9efe948923eb00e6c9ce648b3696834a4b2264ea119eca5d1fc7e464ff7d2298" Dec 13 04:09:31 crc kubenswrapper[5070]: I1213 04:09:31.739848 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" event={"ID":"a2e447c7-5901-414f-af96-69441d4750db","Type":"ContainerStarted","Data":"e660e2b38c90e4283a2723dd23e9dd40961cf40dc90557d51b457490537d9b4a"} Dec 13 04:09:31 crc kubenswrapper[5070]: I1213 04:09:31.891148 5070 scope.go:117] "RemoveContainer" containerID="903321bf578d07a2bd364ff87cad0439861337300c5e1b292b0f12feb32bae74" Dec 13 04:09:31 crc kubenswrapper[5070]: I1213 04:09:31.915822 5070 scope.go:117] "RemoveContainer" containerID="f6ca60f0eb94658e725e11f48aa06546403474a7434df704dc0dc7be0a44dcb8" Dec 13 04:11:51 crc kubenswrapper[5070]: I1213 04:11:51.943599 5070 patch_prober.go:28] interesting pod/machine-config-daemon-9l4rb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 13 04:11:51 crc kubenswrapper[5070]: I1213 04:11:51.944150 5070 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 13 04:12:21 crc kubenswrapper[5070]: I1213 04:12:21.355345 5070 kubelet.go:2421] "SyncLoop 
ADD" source="api" pods=["openshift-marketplace/certified-operators-6ktqr"] Dec 13 04:12:21 crc kubenswrapper[5070]: E1213 04:12:21.357269 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cb5210b7-3d04-4595-b69a-404b02beab71" containerName="extract-content" Dec 13 04:12:21 crc kubenswrapper[5070]: I1213 04:12:21.357294 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="cb5210b7-3d04-4595-b69a-404b02beab71" containerName="extract-content" Dec 13 04:12:21 crc kubenswrapper[5070]: E1213 04:12:21.357323 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cb5210b7-3d04-4595-b69a-404b02beab71" containerName="registry-server" Dec 13 04:12:21 crc kubenswrapper[5070]: I1213 04:12:21.357330 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="cb5210b7-3d04-4595-b69a-404b02beab71" containerName="registry-server" Dec 13 04:12:21 crc kubenswrapper[5070]: E1213 04:12:21.357362 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cb5210b7-3d04-4595-b69a-404b02beab71" containerName="extract-utilities" Dec 13 04:12:21 crc kubenswrapper[5070]: I1213 04:12:21.357369 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="cb5210b7-3d04-4595-b69a-404b02beab71" containerName="extract-utilities" Dec 13 04:12:21 crc kubenswrapper[5070]: I1213 04:12:21.357695 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="cb5210b7-3d04-4595-b69a-404b02beab71" containerName="registry-server" Dec 13 04:12:21 crc kubenswrapper[5070]: I1213 04:12:21.359485 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-6ktqr" Dec 13 04:12:21 crc kubenswrapper[5070]: I1213 04:12:21.379958 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-6ktqr"] Dec 13 04:12:21 crc kubenswrapper[5070]: I1213 04:12:21.536662 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5821feb9-4999-44bd-95ab-0214dfa04e3a-utilities\") pod \"certified-operators-6ktqr\" (UID: \"5821feb9-4999-44bd-95ab-0214dfa04e3a\") " pod="openshift-marketplace/certified-operators-6ktqr" Dec 13 04:12:21 crc kubenswrapper[5070]: I1213 04:12:21.536865 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5821feb9-4999-44bd-95ab-0214dfa04e3a-catalog-content\") pod \"certified-operators-6ktqr\" (UID: \"5821feb9-4999-44bd-95ab-0214dfa04e3a\") " pod="openshift-marketplace/certified-operators-6ktqr" Dec 13 04:12:21 crc kubenswrapper[5070]: I1213 04:12:21.536979 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4hhpm\" (UniqueName: \"kubernetes.io/projected/5821feb9-4999-44bd-95ab-0214dfa04e3a-kube-api-access-4hhpm\") pod \"certified-operators-6ktqr\" (UID: \"5821feb9-4999-44bd-95ab-0214dfa04e3a\") " pod="openshift-marketplace/certified-operators-6ktqr" Dec 13 04:12:21 crc kubenswrapper[5070]: I1213 04:12:21.638818 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4hhpm\" (UniqueName: \"kubernetes.io/projected/5821feb9-4999-44bd-95ab-0214dfa04e3a-kube-api-access-4hhpm\") pod \"certified-operators-6ktqr\" (UID: \"5821feb9-4999-44bd-95ab-0214dfa04e3a\") " pod="openshift-marketplace/certified-operators-6ktqr" Dec 13 04:12:21 crc kubenswrapper[5070]: I1213 
04:12:21.638899 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5821feb9-4999-44bd-95ab-0214dfa04e3a-utilities\") pod \"certified-operators-6ktqr\" (UID: \"5821feb9-4999-44bd-95ab-0214dfa04e3a\") " pod="openshift-marketplace/certified-operators-6ktqr" Dec 13 04:12:21 crc kubenswrapper[5070]: I1213 04:12:21.638994 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5821feb9-4999-44bd-95ab-0214dfa04e3a-catalog-content\") pod \"certified-operators-6ktqr\" (UID: \"5821feb9-4999-44bd-95ab-0214dfa04e3a\") " pod="openshift-marketplace/certified-operators-6ktqr" Dec 13 04:12:21 crc kubenswrapper[5070]: I1213 04:12:21.639528 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5821feb9-4999-44bd-95ab-0214dfa04e3a-catalog-content\") pod \"certified-operators-6ktqr\" (UID: \"5821feb9-4999-44bd-95ab-0214dfa04e3a\") " pod="openshift-marketplace/certified-operators-6ktqr" Dec 13 04:12:21 crc kubenswrapper[5070]: I1213 04:12:21.640042 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5821feb9-4999-44bd-95ab-0214dfa04e3a-utilities\") pod \"certified-operators-6ktqr\" (UID: \"5821feb9-4999-44bd-95ab-0214dfa04e3a\") " pod="openshift-marketplace/certified-operators-6ktqr" Dec 13 04:12:21 crc kubenswrapper[5070]: I1213 04:12:21.660712 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4hhpm\" (UniqueName: \"kubernetes.io/projected/5821feb9-4999-44bd-95ab-0214dfa04e3a-kube-api-access-4hhpm\") pod \"certified-operators-6ktqr\" (UID: \"5821feb9-4999-44bd-95ab-0214dfa04e3a\") " pod="openshift-marketplace/certified-operators-6ktqr" Dec 13 04:12:21 crc kubenswrapper[5070]: I1213 04:12:21.720090 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-6ktqr" Dec 13 04:12:21 crc kubenswrapper[5070]: I1213 04:12:21.943037 5070 patch_prober.go:28] interesting pod/machine-config-daemon-9l4rb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 13 04:12:21 crc kubenswrapper[5070]: I1213 04:12:21.943289 5070 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 13 04:12:22 crc kubenswrapper[5070]: I1213 04:12:22.047640 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-db-create-7ncs9"] Dec 13 04:12:22 crc kubenswrapper[5070]: I1213 04:12:22.056920 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/manila-db-create-7ncs9"] Dec 13 04:12:22 crc kubenswrapper[5070]: I1213 04:12:22.179120 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b1e14e43-fceb-409e-8d3b-9e0435a19506" path="/var/lib/kubelet/pods/b1e14e43-fceb-409e-8d3b-9e0435a19506/volumes" Dec 13 04:12:22 crc kubenswrapper[5070]: I1213 04:12:22.187435 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-6ktqr"] Dec 13 04:12:22 crc kubenswrapper[5070]: I1213 04:12:22.498945 5070 generic.go:334] "Generic (PLEG): container finished" podID="5821feb9-4999-44bd-95ab-0214dfa04e3a" containerID="eeb42da77f66a2ffc84a2e39be23fc4dae9fc3a4684564cc1f7652d2f743f3ef" exitCode=0 Dec 13 04:12:22 crc kubenswrapper[5070]: I1213 04:12:22.499124 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6ktqr" event={"ID":"5821feb9-4999-44bd-95ab-0214dfa04e3a","Type":"ContainerDied","Data":"eeb42da77f66a2ffc84a2e39be23fc4dae9fc3a4684564cc1f7652d2f743f3ef"} Dec 13 04:12:22 crc kubenswrapper[5070]: I1213 04:12:22.499263 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6ktqr" event={"ID":"5821feb9-4999-44bd-95ab-0214dfa04e3a","Type":"ContainerStarted","Data":"5431d582913c13e5f019167a75ee862ebee552f716da842606e11daec4f308bb"} Dec 13 04:12:23 crc kubenswrapper[5070]: I1213 04:12:23.522419 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6ktqr" event={"ID":"5821feb9-4999-44bd-95ab-0214dfa04e3a","Type":"ContainerStarted","Data":"4ce0932d4459010f6a7e16aabb213e433aa4f4186a2894cea36ddee5e3cd11ed"} Dec 13 04:12:25 crc kubenswrapper[5070]: I1213 04:12:25.543122 5070 generic.go:334] "Generic (PLEG): container finished" podID="5821feb9-4999-44bd-95ab-0214dfa04e3a" containerID="4ce0932d4459010f6a7e16aabb213e433aa4f4186a2894cea36ddee5e3cd11ed" exitCode=0 Dec 13 04:12:25 crc kubenswrapper[5070]: I1213 04:12:25.543212 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6ktqr" event={"ID":"5821feb9-4999-44bd-95ab-0214dfa04e3a","Type":"ContainerDied","Data":"4ce0932d4459010f6a7e16aabb213e433aa4f4186a2894cea36ddee5e3cd11ed"} Dec 13 04:12:27 crc kubenswrapper[5070]: I1213 04:12:27.566299 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6ktqr" 
event={"ID":"5821feb9-4999-44bd-95ab-0214dfa04e3a","Type":"ContainerStarted","Data":"259c1d968600a5724276f2744daac781c9b5e5560098a7af8351060e1c8b761e"} Dec 13 04:12:27 crc kubenswrapper[5070]: I1213 04:12:27.596813 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-6ktqr" podStartSLOduration=2.768545164 podStartE2EDuration="6.596796006s" podCreationTimestamp="2025-12-13 04:12:21 +0000 UTC" firstStartedPulling="2025-12-13 04:12:22.501671394 +0000 UTC m=+3634.737514930" lastFinishedPulling="2025-12-13 04:12:26.329922216 +0000 UTC m=+3638.565765772" observedRunningTime="2025-12-13 04:12:27.591526152 +0000 UTC m=+3639.827369708" watchObservedRunningTime="2025-12-13 04:12:27.596796006 +0000 UTC m=+3639.832639552" Dec 13 04:12:31 crc kubenswrapper[5070]: I1213 04:12:31.721142 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-6ktqr" Dec 13 04:12:31 crc kubenswrapper[5070]: I1213 04:12:31.721725 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-6ktqr" Dec 13 04:12:32 crc kubenswrapper[5070]: I1213 04:12:32.015979 5070 scope.go:117] "RemoveContainer" containerID="d25d1774b928abc9074a4dc3428df243c34c6814f93634963b68396493ab5ea6" Dec 13 04:12:32 crc kubenswrapper[5070]: I1213 04:12:32.765324 5070 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/certified-operators-6ktqr" podUID="5821feb9-4999-44bd-95ab-0214dfa04e3a" containerName="registry-server" probeResult="failure" output=< Dec 13 04:12:32 crc kubenswrapper[5070]: timeout: failed to connect service ":50051" within 1s Dec 13 04:12:32 crc kubenswrapper[5070]: > Dec 13 04:12:36 crc kubenswrapper[5070]: I1213 04:12:36.044165 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-19fc-account-create-x6vnv"] Dec 13 04:12:36 crc kubenswrapper[5070]: I1213 04:12:36.053870 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/manila-19fc-account-create-x6vnv"] Dec 13 04:12:36 crc kubenswrapper[5070]: I1213 04:12:36.180522 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0910bd27-a460-49c0-880a-47f2e595337e" path="/var/lib/kubelet/pods/0910bd27-a460-49c0-880a-47f2e595337e/volumes" Dec 13 04:12:41 crc kubenswrapper[5070]: I1213 04:12:41.768669 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-6ktqr" Dec 13 04:12:41 crc kubenswrapper[5070]: I1213 04:12:41.819116 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-6ktqr" Dec 13 04:12:44 crc kubenswrapper[5070]: I1213 04:12:44.366697 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-6ktqr"] Dec 13 04:12:44 crc kubenswrapper[5070]: I1213 04:12:44.367415 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-6ktqr" podUID="5821feb9-4999-44bd-95ab-0214dfa04e3a" containerName="registry-server" containerID="cri-o://259c1d968600a5724276f2744daac781c9b5e5560098a7af8351060e1c8b761e" gracePeriod=2 Dec 13 04:12:44 crc kubenswrapper[5070]: I1213 04:12:44.709016 5070 generic.go:334] "Generic (PLEG): container finished" podID="5821feb9-4999-44bd-95ab-0214dfa04e3a" containerID="259c1d968600a5724276f2744daac781c9b5e5560098a7af8351060e1c8b761e" exitCode=0 Dec 13 04:12:44 crc 
kubenswrapper[5070]: I1213 04:12:44.709066 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6ktqr" event={"ID":"5821feb9-4999-44bd-95ab-0214dfa04e3a","Type":"ContainerDied","Data":"259c1d968600a5724276f2744daac781c9b5e5560098a7af8351060e1c8b761e"} Dec 13 04:12:45 crc kubenswrapper[5070]: I1213 04:12:45.002003 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-6ktqr" Dec 13 04:12:45 crc kubenswrapper[5070]: I1213 04:12:45.126571 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5821feb9-4999-44bd-95ab-0214dfa04e3a-utilities\") pod \"5821feb9-4999-44bd-95ab-0214dfa04e3a\" (UID: \"5821feb9-4999-44bd-95ab-0214dfa04e3a\") " Dec 13 04:12:45 crc kubenswrapper[5070]: I1213 04:12:45.126664 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4hhpm\" (UniqueName: \"kubernetes.io/projected/5821feb9-4999-44bd-95ab-0214dfa04e3a-kube-api-access-4hhpm\") pod \"5821feb9-4999-44bd-95ab-0214dfa04e3a\" (UID: \"5821feb9-4999-44bd-95ab-0214dfa04e3a\") " Dec 13 04:12:45 crc kubenswrapper[5070]: I1213 04:12:45.126739 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5821feb9-4999-44bd-95ab-0214dfa04e3a-catalog-content\") pod \"5821feb9-4999-44bd-95ab-0214dfa04e3a\" (UID: \"5821feb9-4999-44bd-95ab-0214dfa04e3a\") " Dec 13 04:12:45 crc kubenswrapper[5070]: I1213 04:12:45.129693 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5821feb9-4999-44bd-95ab-0214dfa04e3a-utilities" (OuterVolumeSpecName: "utilities") pod "5821feb9-4999-44bd-95ab-0214dfa04e3a" (UID: "5821feb9-4999-44bd-95ab-0214dfa04e3a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 04:12:45 crc kubenswrapper[5070]: I1213 04:12:45.140229 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5821feb9-4999-44bd-95ab-0214dfa04e3a-kube-api-access-4hhpm" (OuterVolumeSpecName: "kube-api-access-4hhpm") pod "5821feb9-4999-44bd-95ab-0214dfa04e3a" (UID: "5821feb9-4999-44bd-95ab-0214dfa04e3a"). InnerVolumeSpecName "kube-api-access-4hhpm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 04:12:45 crc kubenswrapper[5070]: I1213 04:12:45.191975 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5821feb9-4999-44bd-95ab-0214dfa04e3a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5821feb9-4999-44bd-95ab-0214dfa04e3a" (UID: "5821feb9-4999-44bd-95ab-0214dfa04e3a"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 04:12:45 crc kubenswrapper[5070]: I1213 04:12:45.229703 5070 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5821feb9-4999-44bd-95ab-0214dfa04e3a-utilities\") on node \"crc\" DevicePath \"\"" Dec 13 04:12:45 crc kubenswrapper[5070]: I1213 04:12:45.229752 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4hhpm\" (UniqueName: \"kubernetes.io/projected/5821feb9-4999-44bd-95ab-0214dfa04e3a-kube-api-access-4hhpm\") on node \"crc\" DevicePath \"\"" Dec 13 04:12:45 crc kubenswrapper[5070]: I1213 04:12:45.229768 5070 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5821feb9-4999-44bd-95ab-0214dfa04e3a-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 13 04:12:45 crc kubenswrapper[5070]: I1213 04:12:45.719920 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6ktqr" event={"ID":"5821feb9-4999-44bd-95ab-0214dfa04e3a","Type":"ContainerDied","Data":"5431d582913c13e5f019167a75ee862ebee552f716da842606e11daec4f308bb"} Dec 13 04:12:45 crc kubenswrapper[5070]: I1213 04:12:45.719983 5070 scope.go:117] "RemoveContainer" containerID="259c1d968600a5724276f2744daac781c9b5e5560098a7af8351060e1c8b761e" Dec 13 04:12:45 crc kubenswrapper[5070]: I1213 04:12:45.720143 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-6ktqr" Dec 13 04:12:45 crc kubenswrapper[5070]: I1213 04:12:45.762033 5070 scope.go:117] "RemoveContainer" containerID="4ce0932d4459010f6a7e16aabb213e433aa4f4186a2894cea36ddee5e3cd11ed" Dec 13 04:12:45 crc kubenswrapper[5070]: I1213 04:12:45.768219 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-6ktqr"] Dec 13 04:12:45 crc kubenswrapper[5070]: I1213 04:12:45.778704 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-6ktqr"] Dec 13 04:12:45 crc kubenswrapper[5070]: I1213 04:12:45.787051 5070 scope.go:117] "RemoveContainer" containerID="eeb42da77f66a2ffc84a2e39be23fc4dae9fc3a4684564cc1f7652d2f743f3ef" Dec 13 04:12:46 crc kubenswrapper[5070]: I1213 04:12:46.178299 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5821feb9-4999-44bd-95ab-0214dfa04e3a" path="/var/lib/kubelet/pods/5821feb9-4999-44bd-95ab-0214dfa04e3a/volumes" Dec 13 04:12:51 crc kubenswrapper[5070]: I1213 04:12:51.942546 5070 patch_prober.go:28] interesting pod/machine-config-daemon-9l4rb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 13 04:12:51 crc kubenswrapper[5070]: I1213 04:12:51.944186 5070 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 13 04:12:51 crc kubenswrapper[5070]: I1213 04:12:51.944386 5070 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" Dec 13 04:12:51 crc kubenswrapper[5070]: I1213 04:12:51.945286 5070 
kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"e660e2b38c90e4283a2723dd23e9dd40961cf40dc90557d51b457490537d9b4a"} pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 13 04:12:51 crc kubenswrapper[5070]: I1213 04:12:51.945460 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" containerName="machine-config-daemon" containerID="cri-o://e660e2b38c90e4283a2723dd23e9dd40961cf40dc90557d51b457490537d9b4a" gracePeriod=600 Dec 13 04:12:52 crc kubenswrapper[5070]: I1213 04:12:52.780502 5070 generic.go:334] "Generic (PLEG): container finished" podID="a2e447c7-5901-414f-af96-69441d4750db" containerID="e660e2b38c90e4283a2723dd23e9dd40961cf40dc90557d51b457490537d9b4a" exitCode=0 Dec 13 04:12:52 crc kubenswrapper[5070]: I1213 04:12:52.780555 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" event={"ID":"a2e447c7-5901-414f-af96-69441d4750db","Type":"ContainerDied","Data":"e660e2b38c90e4283a2723dd23e9dd40961cf40dc90557d51b457490537d9b4a"} Dec 13 04:12:52 crc kubenswrapper[5070]: I1213 04:12:52.781031 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" event={"ID":"a2e447c7-5901-414f-af96-69441d4750db","Type":"ContainerStarted","Data":"0203f7d9003f44d1170bf88814ca0731b8efa6e7f302533498552dad96e07813"} Dec 13 04:12:52 crc kubenswrapper[5070]: I1213 04:12:52.781050 5070 scope.go:117] "RemoveContainer" containerID="9efe948923eb00e6c9ce648b3696834a4b2264ea119eca5d1fc7e464ff7d2298" Dec 13 04:13:00 crc kubenswrapper[5070]: I1213 04:13:00.040856 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-db-sync-gl79z"] Dec 13 04:13:00 crc kubenswrapper[5070]: I1213 04:13:00.050751 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/manila-db-sync-gl79z"] Dec 13 04:13:00 crc kubenswrapper[5070]: I1213 04:13:00.177428 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6f534dac-0129-4309-b670-2dcfd808b721" path="/var/lib/kubelet/pods/6f534dac-0129-4309-b670-2dcfd808b721/volumes" Dec 13 04:13:32 crc kubenswrapper[5070]: I1213 04:13:32.098770 5070 scope.go:117] "RemoveContainer" containerID="5a277d25ba080dfaec47e89b6a7a40d80918488b53f9b2287ae863096fc8f07d" Dec 13 04:13:32 crc kubenswrapper[5070]: I1213 04:13:32.135413 5070 scope.go:117] "RemoveContainer" containerID="eb71c9717b90de3c4ee25eb9b3359b6ac5ea57f331120c04c155b5ce592bdfcd" Dec 13 04:15:00 crc kubenswrapper[5070]: I1213 04:15:00.177542 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29426655-sxngm"] Dec 13 04:15:00 crc kubenswrapper[5070]: E1213 04:15:00.178412 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5821feb9-4999-44bd-95ab-0214dfa04e3a" containerName="extract-utilities" Dec 13 04:15:00 crc kubenswrapper[5070]: I1213 04:15:00.178428 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="5821feb9-4999-44bd-95ab-0214dfa04e3a" containerName="extract-utilities" Dec 13 04:15:00 crc kubenswrapper[5070]: E1213 04:15:00.178470 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5821feb9-4999-44bd-95ab-0214dfa04e3a" 
containerName="registry-server" Dec 13 04:15:00 crc kubenswrapper[5070]: I1213 04:15:00.178479 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="5821feb9-4999-44bd-95ab-0214dfa04e3a" containerName="registry-server" Dec 13 04:15:00 crc kubenswrapper[5070]: E1213 04:15:00.178490 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5821feb9-4999-44bd-95ab-0214dfa04e3a" containerName="extract-content" Dec 13 04:15:00 crc kubenswrapper[5070]: I1213 04:15:00.178497 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="5821feb9-4999-44bd-95ab-0214dfa04e3a" containerName="extract-content" Dec 13 04:15:00 crc kubenswrapper[5070]: I1213 04:15:00.178742 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="5821feb9-4999-44bd-95ab-0214dfa04e3a" containerName="registry-server" Dec 13 04:15:00 crc kubenswrapper[5070]: I1213 04:15:00.179612 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29426655-sxngm" Dec 13 04:15:00 crc kubenswrapper[5070]: I1213 04:15:00.183348 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 13 04:15:00 crc kubenswrapper[5070]: I1213 04:15:00.184914 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 13 04:15:00 crc kubenswrapper[5070]: I1213 04:15:00.192404 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29426655-sxngm"] Dec 13 04:15:00 crc kubenswrapper[5070]: I1213 04:15:00.341087 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wj9f6\" (UniqueName: \"kubernetes.io/projected/f32dbf5c-c410-4c61-bb8e-3ed1663f580d-kube-api-access-wj9f6\") pod \"collect-profiles-29426655-sxngm\" (UID: \"f32dbf5c-c410-4c61-bb8e-3ed1663f580d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426655-sxngm" Dec 13 04:15:00 crc kubenswrapper[5070]: I1213 04:15:00.341203 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f32dbf5c-c410-4c61-bb8e-3ed1663f580d-secret-volume\") pod \"collect-profiles-29426655-sxngm\" (UID: \"f32dbf5c-c410-4c61-bb8e-3ed1663f580d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426655-sxngm" Dec 13 04:15:00 crc kubenswrapper[5070]: I1213 04:15:00.342857 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f32dbf5c-c410-4c61-bb8e-3ed1663f580d-config-volume\") pod \"collect-profiles-29426655-sxngm\" (UID: \"f32dbf5c-c410-4c61-bb8e-3ed1663f580d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426655-sxngm" Dec 13 04:15:00 crc kubenswrapper[5070]: I1213 04:15:00.445334 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f32dbf5c-c410-4c61-bb8e-3ed1663f580d-secret-volume\") pod \"collect-profiles-29426655-sxngm\" (UID: \"f32dbf5c-c410-4c61-bb8e-3ed1663f580d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426655-sxngm" Dec 13 04:15:00 crc kubenswrapper[5070]: I1213 04:15:00.445845 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" 
(UniqueName: \"kubernetes.io/configmap/f32dbf5c-c410-4c61-bb8e-3ed1663f580d-config-volume\") pod \"collect-profiles-29426655-sxngm\" (UID: \"f32dbf5c-c410-4c61-bb8e-3ed1663f580d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426655-sxngm" Dec 13 04:15:00 crc kubenswrapper[5070]: I1213 04:15:00.445937 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wj9f6\" (UniqueName: \"kubernetes.io/projected/f32dbf5c-c410-4c61-bb8e-3ed1663f580d-kube-api-access-wj9f6\") pod \"collect-profiles-29426655-sxngm\" (UID: \"f32dbf5c-c410-4c61-bb8e-3ed1663f580d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426655-sxngm" Dec 13 04:15:00 crc kubenswrapper[5070]: I1213 04:15:00.447792 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f32dbf5c-c410-4c61-bb8e-3ed1663f580d-config-volume\") pod \"collect-profiles-29426655-sxngm\" (UID: \"f32dbf5c-c410-4c61-bb8e-3ed1663f580d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426655-sxngm" Dec 13 04:15:00 crc kubenswrapper[5070]: I1213 04:15:00.464805 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f32dbf5c-c410-4c61-bb8e-3ed1663f580d-secret-volume\") pod \"collect-profiles-29426655-sxngm\" (UID: \"f32dbf5c-c410-4c61-bb8e-3ed1663f580d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426655-sxngm" Dec 13 04:15:00 crc kubenswrapper[5070]: I1213 04:15:00.470085 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wj9f6\" (UniqueName: \"kubernetes.io/projected/f32dbf5c-c410-4c61-bb8e-3ed1663f580d-kube-api-access-wj9f6\") pod \"collect-profiles-29426655-sxngm\" (UID: \"f32dbf5c-c410-4c61-bb8e-3ed1663f580d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426655-sxngm" Dec 13 04:15:00 crc kubenswrapper[5070]: I1213 04:15:00.502681 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29426655-sxngm" Dec 13 04:15:00 crc kubenswrapper[5070]: I1213 04:15:00.994358 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29426655-sxngm"] Dec 13 04:15:01 crc kubenswrapper[5070]: I1213 04:15:01.958626 5070 generic.go:334] "Generic (PLEG): container finished" podID="f32dbf5c-c410-4c61-bb8e-3ed1663f580d" containerID="233670b0b1584c29836742b06df778b4ada930c36db4164e6794343ff416a212" exitCode=0 Dec 13 04:15:01 crc kubenswrapper[5070]: I1213 04:15:01.958672 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29426655-sxngm" event={"ID":"f32dbf5c-c410-4c61-bb8e-3ed1663f580d","Type":"ContainerDied","Data":"233670b0b1584c29836742b06df778b4ada930c36db4164e6794343ff416a212"} Dec 13 04:15:01 crc kubenswrapper[5070]: I1213 04:15:01.959103 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29426655-sxngm" event={"ID":"f32dbf5c-c410-4c61-bb8e-3ed1663f580d","Type":"ContainerStarted","Data":"253819e27a3e82cfa063365083108ab817788fd35cc6a2f2f570996c91baa915"} Dec 13 04:15:03 crc kubenswrapper[5070]: I1213 04:15:03.805721 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29426655-sxngm" Dec 13 04:15:03 crc kubenswrapper[5070]: I1213 04:15:03.910583 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f32dbf5c-c410-4c61-bb8e-3ed1663f580d-secret-volume\") pod \"f32dbf5c-c410-4c61-bb8e-3ed1663f580d\" (UID: \"f32dbf5c-c410-4c61-bb8e-3ed1663f580d\") " Dec 13 04:15:03 crc kubenswrapper[5070]: I1213 04:15:03.910631 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wj9f6\" (UniqueName: \"kubernetes.io/projected/f32dbf5c-c410-4c61-bb8e-3ed1663f580d-kube-api-access-wj9f6\") pod \"f32dbf5c-c410-4c61-bb8e-3ed1663f580d\" (UID: \"f32dbf5c-c410-4c61-bb8e-3ed1663f580d\") " Dec 13 04:15:03 crc kubenswrapper[5070]: I1213 04:15:03.910666 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f32dbf5c-c410-4c61-bb8e-3ed1663f580d-config-volume\") pod \"f32dbf5c-c410-4c61-bb8e-3ed1663f580d\" (UID: \"f32dbf5c-c410-4c61-bb8e-3ed1663f580d\") " Dec 13 04:15:03 crc kubenswrapper[5070]: I1213 04:15:03.912121 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f32dbf5c-c410-4c61-bb8e-3ed1663f580d-config-volume" (OuterVolumeSpecName: "config-volume") pod "f32dbf5c-c410-4c61-bb8e-3ed1663f580d" (UID: "f32dbf5c-c410-4c61-bb8e-3ed1663f580d"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 04:15:03 crc kubenswrapper[5070]: I1213 04:15:03.930586 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f32dbf5c-c410-4c61-bb8e-3ed1663f580d-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "f32dbf5c-c410-4c61-bb8e-3ed1663f580d" (UID: "f32dbf5c-c410-4c61-bb8e-3ed1663f580d"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 04:15:03 crc kubenswrapper[5070]: I1213 04:15:03.939693 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f32dbf5c-c410-4c61-bb8e-3ed1663f580d-kube-api-access-wj9f6" (OuterVolumeSpecName: "kube-api-access-wj9f6") pod "f32dbf5c-c410-4c61-bb8e-3ed1663f580d" (UID: "f32dbf5c-c410-4c61-bb8e-3ed1663f580d"). InnerVolumeSpecName "kube-api-access-wj9f6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 04:15:03 crc kubenswrapper[5070]: I1213 04:15:03.977337 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29426655-sxngm" event={"ID":"f32dbf5c-c410-4c61-bb8e-3ed1663f580d","Type":"ContainerDied","Data":"253819e27a3e82cfa063365083108ab817788fd35cc6a2f2f570996c91baa915"} Dec 13 04:15:03 crc kubenswrapper[5070]: I1213 04:15:03.977625 5070 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="253819e27a3e82cfa063365083108ab817788fd35cc6a2f2f570996c91baa915" Dec 13 04:15:03 crc kubenswrapper[5070]: I1213 04:15:03.977395 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29426655-sxngm" Dec 13 04:15:04 crc kubenswrapper[5070]: I1213 04:15:04.013527 5070 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f32dbf5c-c410-4c61-bb8e-3ed1663f580d-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 13 04:15:04 crc kubenswrapper[5070]: I1213 04:15:04.013666 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wj9f6\" (UniqueName: \"kubernetes.io/projected/f32dbf5c-c410-4c61-bb8e-3ed1663f580d-kube-api-access-wj9f6\") on node \"crc\" DevicePath \"\"" Dec 13 04:15:04 crc kubenswrapper[5070]: I1213 04:15:04.013722 5070 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f32dbf5c-c410-4c61-bb8e-3ed1663f580d-config-volume\") on node \"crc\" DevicePath \"\"" Dec 13 04:15:04 crc kubenswrapper[5070]: I1213 04:15:04.884238 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29426610-wvnmw"] Dec 13 04:15:04 crc kubenswrapper[5070]: I1213 04:15:04.897882 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29426610-wvnmw"] Dec 13 04:15:06 crc kubenswrapper[5070]: I1213 04:15:06.184293 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7e2f5c73-7275-4235-8577-09c4c10cdfb4" path="/var/lib/kubelet/pods/7e2f5c73-7275-4235-8577-09c4c10cdfb4/volumes" Dec 13 04:15:09 crc kubenswrapper[5070]: I1213 04:15:09.474809 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-6rpzf"] Dec 13 04:15:09 crc kubenswrapper[5070]: E1213 04:15:09.475924 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f32dbf5c-c410-4c61-bb8e-3ed1663f580d" containerName="collect-profiles" Dec 13 04:15:09 crc kubenswrapper[5070]: I1213 04:15:09.475938 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="f32dbf5c-c410-4c61-bb8e-3ed1663f580d" containerName="collect-profiles" Dec 13 04:15:09 crc kubenswrapper[5070]: I1213 04:15:09.476114 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="f32dbf5c-c410-4c61-bb8e-3ed1663f580d" containerName="collect-profiles" Dec 13 04:15:09 crc kubenswrapper[5070]: I1213 04:15:09.477712 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6rpzf" Dec 13 04:15:09 crc kubenswrapper[5070]: I1213 04:15:09.492880 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-6rpzf"] Dec 13 04:15:09 crc kubenswrapper[5070]: I1213 04:15:09.524176 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d9d5dc5f-e4a2-4b86-8a36-5c154f41e9e4-catalog-content\") pod \"redhat-marketplace-6rpzf\" (UID: \"d9d5dc5f-e4a2-4b86-8a36-5c154f41e9e4\") " pod="openshift-marketplace/redhat-marketplace-6rpzf" Dec 13 04:15:09 crc kubenswrapper[5070]: I1213 04:15:09.524248 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d9d5dc5f-e4a2-4b86-8a36-5c154f41e9e4-utilities\") pod \"redhat-marketplace-6rpzf\" (UID: \"d9d5dc5f-e4a2-4b86-8a36-5c154f41e9e4\") " pod="openshift-marketplace/redhat-marketplace-6rpzf" Dec 13 04:15:09 crc kubenswrapper[5070]: I1213 04:15:09.524733 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2spzb\" (UniqueName: \"kubernetes.io/projected/d9d5dc5f-e4a2-4b86-8a36-5c154f41e9e4-kube-api-access-2spzb\") pod \"redhat-marketplace-6rpzf\" (UID: \"d9d5dc5f-e4a2-4b86-8a36-5c154f41e9e4\") " pod="openshift-marketplace/redhat-marketplace-6rpzf" Dec 13 04:15:09 crc kubenswrapper[5070]: I1213 04:15:09.627744 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2spzb\" (UniqueName: \"kubernetes.io/projected/d9d5dc5f-e4a2-4b86-8a36-5c154f41e9e4-kube-api-access-2spzb\") pod \"redhat-marketplace-6rpzf\" (UID: \"d9d5dc5f-e4a2-4b86-8a36-5c154f41e9e4\") " pod="openshift-marketplace/redhat-marketplace-6rpzf" Dec 13 04:15:09 crc kubenswrapper[5070]: I1213 04:15:09.627846 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d9d5dc5f-e4a2-4b86-8a36-5c154f41e9e4-catalog-content\") pod \"redhat-marketplace-6rpzf\" (UID: \"d9d5dc5f-e4a2-4b86-8a36-5c154f41e9e4\") " pod="openshift-marketplace/redhat-marketplace-6rpzf" Dec 13 04:15:09 crc kubenswrapper[5070]: I1213 04:15:09.627882 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d9d5dc5f-e4a2-4b86-8a36-5c154f41e9e4-utilities\") pod \"redhat-marketplace-6rpzf\" (UID: \"d9d5dc5f-e4a2-4b86-8a36-5c154f41e9e4\") " pod="openshift-marketplace/redhat-marketplace-6rpzf" Dec 13 04:15:09 crc kubenswrapper[5070]: I1213 04:15:09.628357 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d9d5dc5f-e4a2-4b86-8a36-5c154f41e9e4-utilities\") pod \"redhat-marketplace-6rpzf\" (UID: \"d9d5dc5f-e4a2-4b86-8a36-5c154f41e9e4\") " pod="openshift-marketplace/redhat-marketplace-6rpzf" Dec 13 04:15:09 crc kubenswrapper[5070]: I1213 04:15:09.628527 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d9d5dc5f-e4a2-4b86-8a36-5c154f41e9e4-catalog-content\") pod \"redhat-marketplace-6rpzf\" (UID: \"d9d5dc5f-e4a2-4b86-8a36-5c154f41e9e4\") " pod="openshift-marketplace/redhat-marketplace-6rpzf" Dec 13 04:15:09 crc kubenswrapper[5070]: I1213 04:15:09.652548 5070 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-2spzb\" (UniqueName: \"kubernetes.io/projected/d9d5dc5f-e4a2-4b86-8a36-5c154f41e9e4-kube-api-access-2spzb\") pod \"redhat-marketplace-6rpzf\" (UID: \"d9d5dc5f-e4a2-4b86-8a36-5c154f41e9e4\") " pod="openshift-marketplace/redhat-marketplace-6rpzf" Dec 13 04:15:09 crc kubenswrapper[5070]: I1213 04:15:09.834850 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6rpzf" Dec 13 04:15:10 crc kubenswrapper[5070]: I1213 04:15:10.396154 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-6rpzf"] Dec 13 04:15:11 crc kubenswrapper[5070]: I1213 04:15:11.035556 5070 generic.go:334] "Generic (PLEG): container finished" podID="d9d5dc5f-e4a2-4b86-8a36-5c154f41e9e4" containerID="2ffc87928096842565c6f4fb167bb8478b0770544515ee038988976b74622131" exitCode=0 Dec 13 04:15:11 crc kubenswrapper[5070]: I1213 04:15:11.035610 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6rpzf" event={"ID":"d9d5dc5f-e4a2-4b86-8a36-5c154f41e9e4","Type":"ContainerDied","Data":"2ffc87928096842565c6f4fb167bb8478b0770544515ee038988976b74622131"} Dec 13 04:15:11 crc kubenswrapper[5070]: I1213 04:15:11.035889 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6rpzf" event={"ID":"d9d5dc5f-e4a2-4b86-8a36-5c154f41e9e4","Type":"ContainerStarted","Data":"bda2ebafb8e34d842bd33eeb7f85d78181721130c4ed9c6b739430c16313fd82"} Dec 13 04:15:11 crc kubenswrapper[5070]: I1213 04:15:11.038042 5070 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 13 04:15:12 crc kubenswrapper[5070]: I1213 04:15:12.047708 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6rpzf" event={"ID":"d9d5dc5f-e4a2-4b86-8a36-5c154f41e9e4","Type":"ContainerStarted","Data":"31b7f085268d29dac371d2a08cc356d1e09e4e6d548f93340fd27b5e4f883ba1"} Dec 13 04:15:13 crc kubenswrapper[5070]: I1213 04:15:13.058084 5070 generic.go:334] "Generic (PLEG): container finished" podID="d9d5dc5f-e4a2-4b86-8a36-5c154f41e9e4" containerID="31b7f085268d29dac371d2a08cc356d1e09e4e6d548f93340fd27b5e4f883ba1" exitCode=0 Dec 13 04:15:13 crc kubenswrapper[5070]: I1213 04:15:13.058127 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6rpzf" event={"ID":"d9d5dc5f-e4a2-4b86-8a36-5c154f41e9e4","Type":"ContainerDied","Data":"31b7f085268d29dac371d2a08cc356d1e09e4e6d548f93340fd27b5e4f883ba1"} Dec 13 04:15:14 crc kubenswrapper[5070]: I1213 04:15:14.069281 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6rpzf" event={"ID":"d9d5dc5f-e4a2-4b86-8a36-5c154f41e9e4","Type":"ContainerStarted","Data":"fba1168dd8ea5141ee352ea666599023b8023f18602ba7027348effe1e7225d5"} Dec 13 04:15:14 crc kubenswrapper[5070]: I1213 04:15:14.088110 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-6rpzf" podStartSLOduration=2.453355892 podStartE2EDuration="5.088088312s" podCreationTimestamp="2025-12-13 04:15:09 +0000 UTC" firstStartedPulling="2025-12-13 04:15:11.037567489 +0000 UTC m=+3803.273411035" lastFinishedPulling="2025-12-13 04:15:13.672299899 +0000 UTC m=+3805.908143455" observedRunningTime="2025-12-13 04:15:14.085631845 +0000 UTC m=+3806.321475401" watchObservedRunningTime="2025-12-13 04:15:14.088088312 +0000 UTC 
m=+3806.323931858" Dec 13 04:15:19 crc kubenswrapper[5070]: I1213 04:15:19.835635 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-6rpzf" Dec 13 04:15:19 crc kubenswrapper[5070]: I1213 04:15:19.836158 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-6rpzf" Dec 13 04:15:19 crc kubenswrapper[5070]: I1213 04:15:19.887560 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-6rpzf" Dec 13 04:15:20 crc kubenswrapper[5070]: I1213 04:15:20.177557 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-6rpzf" Dec 13 04:15:20 crc kubenswrapper[5070]: I1213 04:15:20.225211 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-6rpzf"] Dec 13 04:15:21 crc kubenswrapper[5070]: I1213 04:15:21.942824 5070 patch_prober.go:28] interesting pod/machine-config-daemon-9l4rb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 13 04:15:21 crc kubenswrapper[5070]: I1213 04:15:21.943387 5070 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 13 04:15:22 crc kubenswrapper[5070]: I1213 04:15:22.150651 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-6rpzf" podUID="d9d5dc5f-e4a2-4b86-8a36-5c154f41e9e4" containerName="registry-server" containerID="cri-o://fba1168dd8ea5141ee352ea666599023b8023f18602ba7027348effe1e7225d5" gracePeriod=2 Dec 13 04:15:23 crc kubenswrapper[5070]: I1213 04:15:23.163020 5070 generic.go:334] "Generic (PLEG): container finished" podID="d9d5dc5f-e4a2-4b86-8a36-5c154f41e9e4" containerID="fba1168dd8ea5141ee352ea666599023b8023f18602ba7027348effe1e7225d5" exitCode=0 Dec 13 04:15:23 crc kubenswrapper[5070]: I1213 04:15:23.163226 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6rpzf" event={"ID":"d9d5dc5f-e4a2-4b86-8a36-5c154f41e9e4","Type":"ContainerDied","Data":"fba1168dd8ea5141ee352ea666599023b8023f18602ba7027348effe1e7225d5"} Dec 13 04:15:23 crc kubenswrapper[5070]: I1213 04:15:23.353564 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6rpzf" Dec 13 04:15:23 crc kubenswrapper[5070]: I1213 04:15:23.463656 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d9d5dc5f-e4a2-4b86-8a36-5c154f41e9e4-catalog-content\") pod \"d9d5dc5f-e4a2-4b86-8a36-5c154f41e9e4\" (UID: \"d9d5dc5f-e4a2-4b86-8a36-5c154f41e9e4\") " Dec 13 04:15:23 crc kubenswrapper[5070]: I1213 04:15:23.463795 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2spzb\" (UniqueName: \"kubernetes.io/projected/d9d5dc5f-e4a2-4b86-8a36-5c154f41e9e4-kube-api-access-2spzb\") pod \"d9d5dc5f-e4a2-4b86-8a36-5c154f41e9e4\" (UID: \"d9d5dc5f-e4a2-4b86-8a36-5c154f41e9e4\") " Dec 13 04:15:23 crc kubenswrapper[5070]: I1213 04:15:23.463882 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d9d5dc5f-e4a2-4b86-8a36-5c154f41e9e4-utilities\") pod \"d9d5dc5f-e4a2-4b86-8a36-5c154f41e9e4\" (UID: \"d9d5dc5f-e4a2-4b86-8a36-5c154f41e9e4\") " Dec 13 04:15:23 crc kubenswrapper[5070]: I1213 04:15:23.464841 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d9d5dc5f-e4a2-4b86-8a36-5c154f41e9e4-utilities" (OuterVolumeSpecName: "utilities") pod "d9d5dc5f-e4a2-4b86-8a36-5c154f41e9e4" (UID: "d9d5dc5f-e4a2-4b86-8a36-5c154f41e9e4"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 04:15:23 crc kubenswrapper[5070]: I1213 04:15:23.479020 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d9d5dc5f-e4a2-4b86-8a36-5c154f41e9e4-kube-api-access-2spzb" (OuterVolumeSpecName: "kube-api-access-2spzb") pod "d9d5dc5f-e4a2-4b86-8a36-5c154f41e9e4" (UID: "d9d5dc5f-e4a2-4b86-8a36-5c154f41e9e4"). InnerVolumeSpecName "kube-api-access-2spzb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 04:15:23 crc kubenswrapper[5070]: I1213 04:15:23.488375 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d9d5dc5f-e4a2-4b86-8a36-5c154f41e9e4-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d9d5dc5f-e4a2-4b86-8a36-5c154f41e9e4" (UID: "d9d5dc5f-e4a2-4b86-8a36-5c154f41e9e4"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 04:15:23 crc kubenswrapper[5070]: I1213 04:15:23.565796 5070 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d9d5dc5f-e4a2-4b86-8a36-5c154f41e9e4-utilities\") on node \"crc\" DevicePath \"\"" Dec 13 04:15:23 crc kubenswrapper[5070]: I1213 04:15:23.565833 5070 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d9d5dc5f-e4a2-4b86-8a36-5c154f41e9e4-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 13 04:15:23 crc kubenswrapper[5070]: I1213 04:15:23.565844 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2spzb\" (UniqueName: \"kubernetes.io/projected/d9d5dc5f-e4a2-4b86-8a36-5c154f41e9e4-kube-api-access-2spzb\") on node \"crc\" DevicePath \"\"" Dec 13 04:15:24 crc kubenswrapper[5070]: I1213 04:15:24.176527 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6rpzf" Dec 13 04:15:24 crc kubenswrapper[5070]: I1213 04:15:24.180102 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6rpzf" event={"ID":"d9d5dc5f-e4a2-4b86-8a36-5c154f41e9e4","Type":"ContainerDied","Data":"bda2ebafb8e34d842bd33eeb7f85d78181721130c4ed9c6b739430c16313fd82"} Dec 13 04:15:24 crc kubenswrapper[5070]: I1213 04:15:24.180155 5070 scope.go:117] "RemoveContainer" containerID="fba1168dd8ea5141ee352ea666599023b8023f18602ba7027348effe1e7225d5" Dec 13 04:15:24 crc kubenswrapper[5070]: I1213 04:15:24.202740 5070 scope.go:117] "RemoveContainer" containerID="31b7f085268d29dac371d2a08cc356d1e09e4e6d548f93340fd27b5e4f883ba1" Dec 13 04:15:24 crc kubenswrapper[5070]: I1213 04:15:24.221544 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-6rpzf"] Dec 13 04:15:24 crc kubenswrapper[5070]: I1213 04:15:24.230362 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-6rpzf"] Dec 13 04:15:24 crc kubenswrapper[5070]: I1213 04:15:24.707937 5070 scope.go:117] "RemoveContainer" containerID="2ffc87928096842565c6f4fb167bb8478b0770544515ee038988976b74622131" Dec 13 04:15:26 crc kubenswrapper[5070]: I1213 04:15:26.179232 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d9d5dc5f-e4a2-4b86-8a36-5c154f41e9e4" path="/var/lib/kubelet/pods/d9d5dc5f-e4a2-4b86-8a36-5c154f41e9e4/volumes" Dec 13 04:15:32 crc kubenswrapper[5070]: I1213 04:15:32.308246 5070 scope.go:117] "RemoveContainer" containerID="ded122847230ffaa8f7c1dabc241665ebac8a6034eeac1cc2d992e75dcd74de8" Dec 13 04:15:38 crc kubenswrapper[5070]: I1213 04:15:38.951257 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-zsw7g"] Dec 13 04:15:38 crc kubenswrapper[5070]: E1213 04:15:38.952238 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d9d5dc5f-e4a2-4b86-8a36-5c154f41e9e4" containerName="extract-utilities" Dec 13 04:15:38 crc kubenswrapper[5070]: I1213 04:15:38.952255 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="d9d5dc5f-e4a2-4b86-8a36-5c154f41e9e4" containerName="extract-utilities" Dec 13 04:15:38 crc kubenswrapper[5070]: E1213 04:15:38.952298 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d9d5dc5f-e4a2-4b86-8a36-5c154f41e9e4" containerName="registry-server" Dec 13 04:15:38 crc kubenswrapper[5070]: I1213 04:15:38.952306 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="d9d5dc5f-e4a2-4b86-8a36-5c154f41e9e4" containerName="registry-server" Dec 13 04:15:38 crc kubenswrapper[5070]: E1213 04:15:38.952345 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d9d5dc5f-e4a2-4b86-8a36-5c154f41e9e4" containerName="extract-content" Dec 13 04:15:38 crc kubenswrapper[5070]: I1213 04:15:38.952354 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="d9d5dc5f-e4a2-4b86-8a36-5c154f41e9e4" containerName="extract-content" Dec 13 04:15:38 crc kubenswrapper[5070]: I1213 04:15:38.952610 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="d9d5dc5f-e4a2-4b86-8a36-5c154f41e9e4" containerName="registry-server" Dec 13 04:15:38 crc kubenswrapper[5070]: I1213 04:15:38.954171 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-zsw7g" Dec 13 04:15:38 crc kubenswrapper[5070]: I1213 04:15:38.961318 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-zsw7g"] Dec 13 04:15:39 crc kubenswrapper[5070]: I1213 04:15:39.085835 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/03123ea9-1176-4fda-956f-c5f114c01d8a-catalog-content\") pod \"community-operators-zsw7g\" (UID: \"03123ea9-1176-4fda-956f-c5f114c01d8a\") " pod="openshift-marketplace/community-operators-zsw7g" Dec 13 04:15:39 crc kubenswrapper[5070]: I1213 04:15:39.085933 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/03123ea9-1176-4fda-956f-c5f114c01d8a-utilities\") pod \"community-operators-zsw7g\" (UID: \"03123ea9-1176-4fda-956f-c5f114c01d8a\") " pod="openshift-marketplace/community-operators-zsw7g" Dec 13 04:15:39 crc kubenswrapper[5070]: I1213 04:15:39.085965 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g2c4b\" (UniqueName: \"kubernetes.io/projected/03123ea9-1176-4fda-956f-c5f114c01d8a-kube-api-access-g2c4b\") pod \"community-operators-zsw7g\" (UID: \"03123ea9-1176-4fda-956f-c5f114c01d8a\") " pod="openshift-marketplace/community-operators-zsw7g" Dec 13 04:15:39 crc kubenswrapper[5070]: I1213 04:15:39.188263 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/03123ea9-1176-4fda-956f-c5f114c01d8a-catalog-content\") pod \"community-operators-zsw7g\" (UID: \"03123ea9-1176-4fda-956f-c5f114c01d8a\") " pod="openshift-marketplace/community-operators-zsw7g" Dec 13 04:15:39 crc kubenswrapper[5070]: I1213 04:15:39.188354 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/03123ea9-1176-4fda-956f-c5f114c01d8a-utilities\") pod \"community-operators-zsw7g\" (UID: \"03123ea9-1176-4fda-956f-c5f114c01d8a\") " pod="openshift-marketplace/community-operators-zsw7g" Dec 13 04:15:39 crc kubenswrapper[5070]: I1213 04:15:39.188389 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g2c4b\" (UniqueName: \"kubernetes.io/projected/03123ea9-1176-4fda-956f-c5f114c01d8a-kube-api-access-g2c4b\") pod \"community-operators-zsw7g\" (UID: \"03123ea9-1176-4fda-956f-c5f114c01d8a\") " pod="openshift-marketplace/community-operators-zsw7g" Dec 13 04:15:39 crc kubenswrapper[5070]: I1213 04:15:39.189038 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/03123ea9-1176-4fda-956f-c5f114c01d8a-catalog-content\") pod \"community-operators-zsw7g\" (UID: \"03123ea9-1176-4fda-956f-c5f114c01d8a\") " pod="openshift-marketplace/community-operators-zsw7g" Dec 13 04:15:39 crc kubenswrapper[5070]: I1213 04:15:39.189167 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/03123ea9-1176-4fda-956f-c5f114c01d8a-utilities\") pod \"community-operators-zsw7g\" (UID: \"03123ea9-1176-4fda-956f-c5f114c01d8a\") " pod="openshift-marketplace/community-operators-zsw7g" Dec 13 04:15:39 crc kubenswrapper[5070]: I1213 04:15:39.206529 5070 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-g2c4b\" (UniqueName: \"kubernetes.io/projected/03123ea9-1176-4fda-956f-c5f114c01d8a-kube-api-access-g2c4b\") pod \"community-operators-zsw7g\" (UID: \"03123ea9-1176-4fda-956f-c5f114c01d8a\") " pod="openshift-marketplace/community-operators-zsw7g" Dec 13 04:15:39 crc kubenswrapper[5070]: I1213 04:15:39.278320 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-zsw7g" Dec 13 04:15:39 crc kubenswrapper[5070]: I1213 04:15:39.735692 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-zsw7g"] Dec 13 04:15:40 crc kubenswrapper[5070]: I1213 04:15:40.351794 5070 generic.go:334] "Generic (PLEG): container finished" podID="03123ea9-1176-4fda-956f-c5f114c01d8a" containerID="a884571fc61bc5e7d83ae3c09e89f9cc396649d3384d893dedf9e246d5c581e6" exitCode=0 Dec 13 04:15:40 crc kubenswrapper[5070]: I1213 04:15:40.352049 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zsw7g" event={"ID":"03123ea9-1176-4fda-956f-c5f114c01d8a","Type":"ContainerDied","Data":"a884571fc61bc5e7d83ae3c09e89f9cc396649d3384d893dedf9e246d5c581e6"} Dec 13 04:15:40 crc kubenswrapper[5070]: I1213 04:15:40.352074 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zsw7g" event={"ID":"03123ea9-1176-4fda-956f-c5f114c01d8a","Type":"ContainerStarted","Data":"a967c2c5a6a2bd4d2a301e270c62201fbbb62ea23e5cbedef6598ba34e1f0768"} Dec 13 04:15:42 crc kubenswrapper[5070]: I1213 04:15:42.373163 5070 generic.go:334] "Generic (PLEG): container finished" podID="03123ea9-1176-4fda-956f-c5f114c01d8a" containerID="a6b61d0850142fd5bbfc0d3d507d22eda4888d0f5aba78c68b1d419f59e87251" exitCode=0 Dec 13 04:15:42 crc kubenswrapper[5070]: I1213 04:15:42.373226 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zsw7g" event={"ID":"03123ea9-1176-4fda-956f-c5f114c01d8a","Type":"ContainerDied","Data":"a6b61d0850142fd5bbfc0d3d507d22eda4888d0f5aba78c68b1d419f59e87251"} Dec 13 04:15:44 crc kubenswrapper[5070]: I1213 04:15:44.391714 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zsw7g" event={"ID":"03123ea9-1176-4fda-956f-c5f114c01d8a","Type":"ContainerStarted","Data":"4b4352b0c23c56effd027169635248c8d0c32b5d48c5e94090bbb8e9fa34e39e"} Dec 13 04:15:44 crc kubenswrapper[5070]: I1213 04:15:44.413091 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-zsw7g" podStartSLOduration=3.924860492 podStartE2EDuration="6.41307076s" podCreationTimestamp="2025-12-13 04:15:38 +0000 UTC" firstStartedPulling="2025-12-13 04:15:40.353649477 +0000 UTC m=+3832.589493023" lastFinishedPulling="2025-12-13 04:15:42.841859745 +0000 UTC m=+3835.077703291" observedRunningTime="2025-12-13 04:15:44.408377562 +0000 UTC m=+3836.644221108" watchObservedRunningTime="2025-12-13 04:15:44.41307076 +0000 UTC m=+3836.648914306" Dec 13 04:15:49 crc kubenswrapper[5070]: I1213 04:15:49.279583 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-zsw7g" Dec 13 04:15:49 crc kubenswrapper[5070]: I1213 04:15:49.280626 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-zsw7g" Dec 13 04:15:49 crc kubenswrapper[5070]: I1213 04:15:49.336189 5070 
kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-zsw7g" Dec 13 04:15:49 crc kubenswrapper[5070]: I1213 04:15:49.515236 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-zsw7g" Dec 13 04:15:49 crc kubenswrapper[5070]: I1213 04:15:49.576176 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-zsw7g"] Dec 13 04:15:51 crc kubenswrapper[5070]: I1213 04:15:51.470872 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-zsw7g" podUID="03123ea9-1176-4fda-956f-c5f114c01d8a" containerName="registry-server" containerID="cri-o://4b4352b0c23c56effd027169635248c8d0c32b5d48c5e94090bbb8e9fa34e39e" gracePeriod=2 Dec 13 04:15:51 crc kubenswrapper[5070]: I1213 04:15:51.943478 5070 patch_prober.go:28] interesting pod/machine-config-daemon-9l4rb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 13 04:15:51 crc kubenswrapper[5070]: I1213 04:15:51.944073 5070 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 13 04:15:51 crc kubenswrapper[5070]: I1213 04:15:51.979774 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-zsw7g" Dec 13 04:15:52 crc kubenswrapper[5070]: I1213 04:15:52.083128 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g2c4b\" (UniqueName: \"kubernetes.io/projected/03123ea9-1176-4fda-956f-c5f114c01d8a-kube-api-access-g2c4b\") pod \"03123ea9-1176-4fda-956f-c5f114c01d8a\" (UID: \"03123ea9-1176-4fda-956f-c5f114c01d8a\") " Dec 13 04:15:52 crc kubenswrapper[5070]: I1213 04:15:52.083284 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/03123ea9-1176-4fda-956f-c5f114c01d8a-catalog-content\") pod \"03123ea9-1176-4fda-956f-c5f114c01d8a\" (UID: \"03123ea9-1176-4fda-956f-c5f114c01d8a\") " Dec 13 04:15:52 crc kubenswrapper[5070]: I1213 04:15:52.083439 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/03123ea9-1176-4fda-956f-c5f114c01d8a-utilities\") pod \"03123ea9-1176-4fda-956f-c5f114c01d8a\" (UID: \"03123ea9-1176-4fda-956f-c5f114c01d8a\") " Dec 13 04:15:52 crc kubenswrapper[5070]: I1213 04:15:52.084345 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/03123ea9-1176-4fda-956f-c5f114c01d8a-utilities" (OuterVolumeSpecName: "utilities") pod "03123ea9-1176-4fda-956f-c5f114c01d8a" (UID: "03123ea9-1176-4fda-956f-c5f114c01d8a"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 04:15:52 crc kubenswrapper[5070]: I1213 04:15:52.089887 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/03123ea9-1176-4fda-956f-c5f114c01d8a-kube-api-access-g2c4b" (OuterVolumeSpecName: "kube-api-access-g2c4b") pod "03123ea9-1176-4fda-956f-c5f114c01d8a" (UID: "03123ea9-1176-4fda-956f-c5f114c01d8a"). InnerVolumeSpecName "kube-api-access-g2c4b". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 04:15:52 crc kubenswrapper[5070]: I1213 04:15:52.176885 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/03123ea9-1176-4fda-956f-c5f114c01d8a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "03123ea9-1176-4fda-956f-c5f114c01d8a" (UID: "03123ea9-1176-4fda-956f-c5f114c01d8a"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 04:15:52 crc kubenswrapper[5070]: I1213 04:15:52.185682 5070 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/03123ea9-1176-4fda-956f-c5f114c01d8a-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 13 04:15:52 crc kubenswrapper[5070]: I1213 04:15:52.185722 5070 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/03123ea9-1176-4fda-956f-c5f114c01d8a-utilities\") on node \"crc\" DevicePath \"\"" Dec 13 04:15:52 crc kubenswrapper[5070]: I1213 04:15:52.185737 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g2c4b\" (UniqueName: \"kubernetes.io/projected/03123ea9-1176-4fda-956f-c5f114c01d8a-kube-api-access-g2c4b\") on node \"crc\" DevicePath \"\"" Dec 13 04:15:52 crc kubenswrapper[5070]: I1213 04:15:52.481586 5070 generic.go:334] "Generic (PLEG): container finished" podID="03123ea9-1176-4fda-956f-c5f114c01d8a" containerID="4b4352b0c23c56effd027169635248c8d0c32b5d48c5e94090bbb8e9fa34e39e" exitCode=0 Dec 13 04:15:52 crc kubenswrapper[5070]: I1213 04:15:52.481634 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zsw7g" event={"ID":"03123ea9-1176-4fda-956f-c5f114c01d8a","Type":"ContainerDied","Data":"4b4352b0c23c56effd027169635248c8d0c32b5d48c5e94090bbb8e9fa34e39e"} Dec 13 04:15:52 crc kubenswrapper[5070]: I1213 04:15:52.481686 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zsw7g" event={"ID":"03123ea9-1176-4fda-956f-c5f114c01d8a","Type":"ContainerDied","Data":"a967c2c5a6a2bd4d2a301e270c62201fbbb62ea23e5cbedef6598ba34e1f0768"} Dec 13 04:15:52 crc kubenswrapper[5070]: I1213 04:15:52.481707 5070 scope.go:117] "RemoveContainer" containerID="4b4352b0c23c56effd027169635248c8d0c32b5d48c5e94090bbb8e9fa34e39e" Dec 13 04:15:52 crc kubenswrapper[5070]: I1213 04:15:52.481653 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-zsw7g" Dec 13 04:15:52 crc kubenswrapper[5070]: I1213 04:15:52.521939 5070 scope.go:117] "RemoveContainer" containerID="a6b61d0850142fd5bbfc0d3d507d22eda4888d0f5aba78c68b1d419f59e87251" Dec 13 04:15:52 crc kubenswrapper[5070]: I1213 04:15:52.528293 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-zsw7g"] Dec 13 04:15:52 crc kubenswrapper[5070]: I1213 04:15:52.537583 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-zsw7g"] Dec 13 04:15:52 crc kubenswrapper[5070]: I1213 04:15:52.562860 5070 scope.go:117] "RemoveContainer" containerID="a884571fc61bc5e7d83ae3c09e89f9cc396649d3384d893dedf9e246d5c581e6" Dec 13 04:15:52 crc kubenswrapper[5070]: I1213 04:15:52.611566 5070 scope.go:117] "RemoveContainer" containerID="4b4352b0c23c56effd027169635248c8d0c32b5d48c5e94090bbb8e9fa34e39e" Dec 13 04:15:52 crc kubenswrapper[5070]: E1213 04:15:52.611899 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4b4352b0c23c56effd027169635248c8d0c32b5d48c5e94090bbb8e9fa34e39e\": container with ID starting with 4b4352b0c23c56effd027169635248c8d0c32b5d48c5e94090bbb8e9fa34e39e not found: ID does not exist" containerID="4b4352b0c23c56effd027169635248c8d0c32b5d48c5e94090bbb8e9fa34e39e" Dec 13 04:15:52 crc kubenswrapper[5070]: I1213 04:15:52.611936 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4b4352b0c23c56effd027169635248c8d0c32b5d48c5e94090bbb8e9fa34e39e"} err="failed to get container status \"4b4352b0c23c56effd027169635248c8d0c32b5d48c5e94090bbb8e9fa34e39e\": rpc error: code = NotFound desc = could not find container \"4b4352b0c23c56effd027169635248c8d0c32b5d48c5e94090bbb8e9fa34e39e\": container with ID starting with 4b4352b0c23c56effd027169635248c8d0c32b5d48c5e94090bbb8e9fa34e39e not found: ID does not exist" Dec 13 04:15:52 crc kubenswrapper[5070]: I1213 04:15:52.611963 5070 scope.go:117] "RemoveContainer" containerID="a6b61d0850142fd5bbfc0d3d507d22eda4888d0f5aba78c68b1d419f59e87251" Dec 13 04:15:52 crc kubenswrapper[5070]: E1213 04:15:52.612255 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a6b61d0850142fd5bbfc0d3d507d22eda4888d0f5aba78c68b1d419f59e87251\": container with ID starting with a6b61d0850142fd5bbfc0d3d507d22eda4888d0f5aba78c68b1d419f59e87251 not found: ID does not exist" containerID="a6b61d0850142fd5bbfc0d3d507d22eda4888d0f5aba78c68b1d419f59e87251" Dec 13 04:15:52 crc kubenswrapper[5070]: I1213 04:15:52.612293 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a6b61d0850142fd5bbfc0d3d507d22eda4888d0f5aba78c68b1d419f59e87251"} err="failed to get container status \"a6b61d0850142fd5bbfc0d3d507d22eda4888d0f5aba78c68b1d419f59e87251\": rpc error: code = NotFound desc = could not find container \"a6b61d0850142fd5bbfc0d3d507d22eda4888d0f5aba78c68b1d419f59e87251\": container with ID starting with a6b61d0850142fd5bbfc0d3d507d22eda4888d0f5aba78c68b1d419f59e87251 not found: ID does not exist" Dec 13 04:15:52 crc kubenswrapper[5070]: I1213 04:15:52.612319 5070 scope.go:117] "RemoveContainer" containerID="a884571fc61bc5e7d83ae3c09e89f9cc396649d3384d893dedf9e246d5c581e6" Dec 13 04:15:52 crc kubenswrapper[5070]: E1213 04:15:52.612590 5070 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"a884571fc61bc5e7d83ae3c09e89f9cc396649d3384d893dedf9e246d5c581e6\": container with ID starting with a884571fc61bc5e7d83ae3c09e89f9cc396649d3384d893dedf9e246d5c581e6 not found: ID does not exist" containerID="a884571fc61bc5e7d83ae3c09e89f9cc396649d3384d893dedf9e246d5c581e6" Dec 13 04:15:52 crc kubenswrapper[5070]: I1213 04:15:52.612620 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a884571fc61bc5e7d83ae3c09e89f9cc396649d3384d893dedf9e246d5c581e6"} err="failed to get container status \"a884571fc61bc5e7d83ae3c09e89f9cc396649d3384d893dedf9e246d5c581e6\": rpc error: code = NotFound desc = could not find container \"a884571fc61bc5e7d83ae3c09e89f9cc396649d3384d893dedf9e246d5c581e6\": container with ID starting with a884571fc61bc5e7d83ae3c09e89f9cc396649d3384d893dedf9e246d5c581e6 not found: ID does not exist" Dec 13 04:15:54 crc kubenswrapper[5070]: I1213 04:15:54.183010 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="03123ea9-1176-4fda-956f-c5f114c01d8a" path="/var/lib/kubelet/pods/03123ea9-1176-4fda-956f-c5f114c01d8a/volumes" Dec 13 04:16:21 crc kubenswrapper[5070]: I1213 04:16:21.943172 5070 patch_prober.go:28] interesting pod/machine-config-daemon-9l4rb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 13 04:16:21 crc kubenswrapper[5070]: I1213 04:16:21.943650 5070 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 13 04:16:21 crc kubenswrapper[5070]: I1213 04:16:21.943693 5070 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" Dec 13 04:16:21 crc kubenswrapper[5070]: I1213 04:16:21.944552 5070 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"0203f7d9003f44d1170bf88814ca0731b8efa6e7f302533498552dad96e07813"} pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 13 04:16:21 crc kubenswrapper[5070]: I1213 04:16:21.944610 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" containerName="machine-config-daemon" containerID="cri-o://0203f7d9003f44d1170bf88814ca0731b8efa6e7f302533498552dad96e07813" gracePeriod=600 Dec 13 04:16:22 crc kubenswrapper[5070]: E1213 04:16:22.076077 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 04:16:22 crc kubenswrapper[5070]: I1213 04:16:22.727047 5070 generic.go:334] 
"Generic (PLEG): container finished" podID="a2e447c7-5901-414f-af96-69441d4750db" containerID="0203f7d9003f44d1170bf88814ca0731b8efa6e7f302533498552dad96e07813" exitCode=0 Dec 13 04:16:22 crc kubenswrapper[5070]: I1213 04:16:22.727122 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" event={"ID":"a2e447c7-5901-414f-af96-69441d4750db","Type":"ContainerDied","Data":"0203f7d9003f44d1170bf88814ca0731b8efa6e7f302533498552dad96e07813"} Dec 13 04:16:22 crc kubenswrapper[5070]: I1213 04:16:22.727375 5070 scope.go:117] "RemoveContainer" containerID="e660e2b38c90e4283a2723dd23e9dd40961cf40dc90557d51b457490537d9b4a" Dec 13 04:16:22 crc kubenswrapper[5070]: I1213 04:16:22.727974 5070 scope.go:117] "RemoveContainer" containerID="0203f7d9003f44d1170bf88814ca0731b8efa6e7f302533498552dad96e07813" Dec 13 04:16:22 crc kubenswrapper[5070]: E1213 04:16:22.728223 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 04:16:33 crc kubenswrapper[5070]: I1213 04:16:33.168252 5070 scope.go:117] "RemoveContainer" containerID="0203f7d9003f44d1170bf88814ca0731b8efa6e7f302533498552dad96e07813" Dec 13 04:16:33 crc kubenswrapper[5070]: E1213 04:16:33.169480 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 04:16:45 crc kubenswrapper[5070]: I1213 04:16:45.166819 5070 scope.go:117] "RemoveContainer" containerID="0203f7d9003f44d1170bf88814ca0731b8efa6e7f302533498552dad96e07813" Dec 13 04:16:45 crc kubenswrapper[5070]: E1213 04:16:45.167628 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 04:16:56 crc kubenswrapper[5070]: I1213 04:16:56.167501 5070 scope.go:117] "RemoveContainer" containerID="0203f7d9003f44d1170bf88814ca0731b8efa6e7f302533498552dad96e07813" Dec 13 04:16:56 crc kubenswrapper[5070]: E1213 04:16:56.168188 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 04:17:10 crc kubenswrapper[5070]: I1213 04:17:10.167150 5070 scope.go:117] "RemoveContainer" containerID="0203f7d9003f44d1170bf88814ca0731b8efa6e7f302533498552dad96e07813" 
Dec 13 04:17:10 crc kubenswrapper[5070]: E1213 04:17:10.167988 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 04:17:25 crc kubenswrapper[5070]: I1213 04:17:25.167559 5070 scope.go:117] "RemoveContainer" containerID="0203f7d9003f44d1170bf88814ca0731b8efa6e7f302533498552dad96e07813" Dec 13 04:17:25 crc kubenswrapper[5070]: E1213 04:17:25.168493 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 04:17:40 crc kubenswrapper[5070]: I1213 04:17:40.168000 5070 scope.go:117] "RemoveContainer" containerID="0203f7d9003f44d1170bf88814ca0731b8efa6e7f302533498552dad96e07813" Dec 13 04:17:40 crc kubenswrapper[5070]: E1213 04:17:40.168727 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 04:17:51 crc kubenswrapper[5070]: I1213 04:17:51.167315 5070 scope.go:117] "RemoveContainer" containerID="0203f7d9003f44d1170bf88814ca0731b8efa6e7f302533498552dad96e07813" Dec 13 04:17:51 crc kubenswrapper[5070]: E1213 04:17:51.168606 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 04:18:03 crc kubenswrapper[5070]: I1213 04:18:03.167003 5070 scope.go:117] "RemoveContainer" containerID="0203f7d9003f44d1170bf88814ca0731b8efa6e7f302533498552dad96e07813" Dec 13 04:18:03 crc kubenswrapper[5070]: E1213 04:18:03.167859 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 04:18:18 crc kubenswrapper[5070]: I1213 04:18:18.173886 5070 scope.go:117] "RemoveContainer" containerID="0203f7d9003f44d1170bf88814ca0731b8efa6e7f302533498552dad96e07813" Dec 13 04:18:18 crc kubenswrapper[5070]: E1213 04:18:18.174862 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with 
CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 04:18:32 crc kubenswrapper[5070]: I1213 04:18:32.166677 5070 scope.go:117] "RemoveContainer" containerID="0203f7d9003f44d1170bf88814ca0731b8efa6e7f302533498552dad96e07813" Dec 13 04:18:32 crc kubenswrapper[5070]: E1213 04:18:32.167656 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 04:18:40 crc kubenswrapper[5070]: I1213 04:18:40.231006 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-bg9lw"] Dec 13 04:18:40 crc kubenswrapper[5070]: E1213 04:18:40.231919 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="03123ea9-1176-4fda-956f-c5f114c01d8a" containerName="registry-server" Dec 13 04:18:40 crc kubenswrapper[5070]: I1213 04:18:40.231935 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="03123ea9-1176-4fda-956f-c5f114c01d8a" containerName="registry-server" Dec 13 04:18:40 crc kubenswrapper[5070]: E1213 04:18:40.231961 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="03123ea9-1176-4fda-956f-c5f114c01d8a" containerName="extract-utilities" Dec 13 04:18:40 crc kubenswrapper[5070]: I1213 04:18:40.231969 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="03123ea9-1176-4fda-956f-c5f114c01d8a" containerName="extract-utilities" Dec 13 04:18:40 crc kubenswrapper[5070]: E1213 04:18:40.231985 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="03123ea9-1176-4fda-956f-c5f114c01d8a" containerName="extract-content" Dec 13 04:18:40 crc kubenswrapper[5070]: I1213 04:18:40.231992 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="03123ea9-1176-4fda-956f-c5f114c01d8a" containerName="extract-content" Dec 13 04:18:40 crc kubenswrapper[5070]: I1213 04:18:40.232234 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="03123ea9-1176-4fda-956f-c5f114c01d8a" containerName="registry-server" Dec 13 04:18:40 crc kubenswrapper[5070]: I1213 04:18:40.233804 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-bg9lw" Dec 13 04:18:40 crc kubenswrapper[5070]: I1213 04:18:40.248416 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-bg9lw"] Dec 13 04:18:40 crc kubenswrapper[5070]: I1213 04:18:40.403523 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3d94d6ec-42cc-4fb6-88e6-9c4ccaa14e77-catalog-content\") pod \"redhat-operators-bg9lw\" (UID: \"3d94d6ec-42cc-4fb6-88e6-9c4ccaa14e77\") " pod="openshift-marketplace/redhat-operators-bg9lw" Dec 13 04:18:40 crc kubenswrapper[5070]: I1213 04:18:40.403615 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-767dv\" (UniqueName: \"kubernetes.io/projected/3d94d6ec-42cc-4fb6-88e6-9c4ccaa14e77-kube-api-access-767dv\") pod \"redhat-operators-bg9lw\" (UID: \"3d94d6ec-42cc-4fb6-88e6-9c4ccaa14e77\") " pod="openshift-marketplace/redhat-operators-bg9lw" Dec 13 04:18:40 crc kubenswrapper[5070]: I1213 04:18:40.403654 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3d94d6ec-42cc-4fb6-88e6-9c4ccaa14e77-utilities\") pod \"redhat-operators-bg9lw\" (UID: \"3d94d6ec-42cc-4fb6-88e6-9c4ccaa14e77\") " pod="openshift-marketplace/redhat-operators-bg9lw" Dec 13 04:18:40 crc kubenswrapper[5070]: I1213 04:18:40.505058 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3d94d6ec-42cc-4fb6-88e6-9c4ccaa14e77-catalog-content\") pod \"redhat-operators-bg9lw\" (UID: \"3d94d6ec-42cc-4fb6-88e6-9c4ccaa14e77\") " pod="openshift-marketplace/redhat-operators-bg9lw" Dec 13 04:18:40 crc kubenswrapper[5070]: I1213 04:18:40.505139 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-767dv\" (UniqueName: \"kubernetes.io/projected/3d94d6ec-42cc-4fb6-88e6-9c4ccaa14e77-kube-api-access-767dv\") pod \"redhat-operators-bg9lw\" (UID: \"3d94d6ec-42cc-4fb6-88e6-9c4ccaa14e77\") " pod="openshift-marketplace/redhat-operators-bg9lw" Dec 13 04:18:40 crc kubenswrapper[5070]: I1213 04:18:40.505179 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3d94d6ec-42cc-4fb6-88e6-9c4ccaa14e77-utilities\") pod \"redhat-operators-bg9lw\" (UID: \"3d94d6ec-42cc-4fb6-88e6-9c4ccaa14e77\") " pod="openshift-marketplace/redhat-operators-bg9lw" Dec 13 04:18:40 crc kubenswrapper[5070]: I1213 04:18:40.505951 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3d94d6ec-42cc-4fb6-88e6-9c4ccaa14e77-catalog-content\") pod \"redhat-operators-bg9lw\" (UID: \"3d94d6ec-42cc-4fb6-88e6-9c4ccaa14e77\") " pod="openshift-marketplace/redhat-operators-bg9lw" Dec 13 04:18:40 crc kubenswrapper[5070]: I1213 04:18:40.505960 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3d94d6ec-42cc-4fb6-88e6-9c4ccaa14e77-utilities\") pod \"redhat-operators-bg9lw\" (UID: \"3d94d6ec-42cc-4fb6-88e6-9c4ccaa14e77\") " pod="openshift-marketplace/redhat-operators-bg9lw" Dec 13 04:18:40 crc kubenswrapper[5070]: I1213 04:18:40.530762 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-767dv\" (UniqueName: \"kubernetes.io/projected/3d94d6ec-42cc-4fb6-88e6-9c4ccaa14e77-kube-api-access-767dv\") pod \"redhat-operators-bg9lw\" (UID: \"3d94d6ec-42cc-4fb6-88e6-9c4ccaa14e77\") " pod="openshift-marketplace/redhat-operators-bg9lw" Dec 13 04:18:40 crc kubenswrapper[5070]: I1213 04:18:40.557953 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-bg9lw" Dec 13 04:18:41 crc kubenswrapper[5070]: I1213 04:18:41.086364 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-bg9lw"] Dec 13 04:18:42 crc kubenswrapper[5070]: I1213 04:18:42.030300 5070 generic.go:334] "Generic (PLEG): container finished" podID="3d94d6ec-42cc-4fb6-88e6-9c4ccaa14e77" containerID="7cf0216e3c2c051c487e03b495ac9f279811bf4a3dda019befd3a511e2e7339f" exitCode=0 Dec 13 04:18:42 crc kubenswrapper[5070]: I1213 04:18:42.030372 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bg9lw" event={"ID":"3d94d6ec-42cc-4fb6-88e6-9c4ccaa14e77","Type":"ContainerDied","Data":"7cf0216e3c2c051c487e03b495ac9f279811bf4a3dda019befd3a511e2e7339f"} Dec 13 04:18:42 crc kubenswrapper[5070]: I1213 04:18:42.030870 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bg9lw" event={"ID":"3d94d6ec-42cc-4fb6-88e6-9c4ccaa14e77","Type":"ContainerStarted","Data":"9de5b4c70dd01bb2a16d375597f7668a5cfd30c63e47e840db98da755898a6a5"} Dec 13 04:18:43 crc kubenswrapper[5070]: I1213 04:18:43.040334 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bg9lw" event={"ID":"3d94d6ec-42cc-4fb6-88e6-9c4ccaa14e77","Type":"ContainerStarted","Data":"ba1e86329b9fdf1ec1ad5b951cc448c5755da17fd3385acc9ba0adfb9dabfdc7"} Dec 13 04:18:45 crc kubenswrapper[5070]: I1213 04:18:45.060545 5070 generic.go:334] "Generic (PLEG): container finished" podID="3d94d6ec-42cc-4fb6-88e6-9c4ccaa14e77" containerID="ba1e86329b9fdf1ec1ad5b951cc448c5755da17fd3385acc9ba0adfb9dabfdc7" exitCode=0 Dec 13 04:18:45 crc kubenswrapper[5070]: I1213 04:18:45.060628 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bg9lw" event={"ID":"3d94d6ec-42cc-4fb6-88e6-9c4ccaa14e77","Type":"ContainerDied","Data":"ba1e86329b9fdf1ec1ad5b951cc448c5755da17fd3385acc9ba0adfb9dabfdc7"} Dec 13 04:18:46 crc kubenswrapper[5070]: I1213 04:18:46.070916 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bg9lw" event={"ID":"3d94d6ec-42cc-4fb6-88e6-9c4ccaa14e77","Type":"ContainerStarted","Data":"9063ec948e1954235840ba31fb8fa04ac83cf36a4df990abab166aedf01d222b"} Dec 13 04:18:46 crc kubenswrapper[5070]: I1213 04:18:46.092060 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-bg9lw" podStartSLOduration=2.615808107 podStartE2EDuration="6.092043995s" podCreationTimestamp="2025-12-13 04:18:40 +0000 UTC" firstStartedPulling="2025-12-13 04:18:42.032752426 +0000 UTC m=+4014.268595972" lastFinishedPulling="2025-12-13 04:18:45.508988314 +0000 UTC m=+4017.744831860" observedRunningTime="2025-12-13 04:18:46.087233074 +0000 UTC m=+4018.323076640" watchObservedRunningTime="2025-12-13 04:18:46.092043995 +0000 UTC m=+4018.327887541" Dec 13 04:18:47 crc kubenswrapper[5070]: I1213 04:18:47.168134 5070 scope.go:117] "RemoveContainer" containerID="0203f7d9003f44d1170bf88814ca0731b8efa6e7f302533498552dad96e07813" Dec 13 
04:18:47 crc kubenswrapper[5070]: E1213 04:18:47.168717 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 04:18:50 crc kubenswrapper[5070]: I1213 04:18:50.558922 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-bg9lw" Dec 13 04:18:50 crc kubenswrapper[5070]: I1213 04:18:50.560496 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-bg9lw" Dec 13 04:18:51 crc kubenswrapper[5070]: I1213 04:18:51.611915 5070 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-bg9lw" podUID="3d94d6ec-42cc-4fb6-88e6-9c4ccaa14e77" containerName="registry-server" probeResult="failure" output=< Dec 13 04:18:51 crc kubenswrapper[5070]: timeout: failed to connect service ":50051" within 1s Dec 13 04:18:51 crc kubenswrapper[5070]: > Dec 13 04:19:00 crc kubenswrapper[5070]: I1213 04:19:00.609130 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-bg9lw" Dec 13 04:19:00 crc kubenswrapper[5070]: I1213 04:19:00.657195 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-bg9lw" Dec 13 04:19:00 crc kubenswrapper[5070]: I1213 04:19:00.851413 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-bg9lw"] Dec 13 04:19:01 crc kubenswrapper[5070]: I1213 04:19:01.167379 5070 scope.go:117] "RemoveContainer" containerID="0203f7d9003f44d1170bf88814ca0731b8efa6e7f302533498552dad96e07813" Dec 13 04:19:01 crc kubenswrapper[5070]: E1213 04:19:01.167775 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 04:19:02 crc kubenswrapper[5070]: I1213 04:19:02.199917 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-bg9lw" podUID="3d94d6ec-42cc-4fb6-88e6-9c4ccaa14e77" containerName="registry-server" containerID="cri-o://9063ec948e1954235840ba31fb8fa04ac83cf36a4df990abab166aedf01d222b" gracePeriod=2 Dec 13 04:19:02 crc kubenswrapper[5070]: I1213 04:19:02.808427 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-bg9lw" Dec 13 04:19:02 crc kubenswrapper[5070]: I1213 04:19:02.910603 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-767dv\" (UniqueName: \"kubernetes.io/projected/3d94d6ec-42cc-4fb6-88e6-9c4ccaa14e77-kube-api-access-767dv\") pod \"3d94d6ec-42cc-4fb6-88e6-9c4ccaa14e77\" (UID: \"3d94d6ec-42cc-4fb6-88e6-9c4ccaa14e77\") " Dec 13 04:19:02 crc kubenswrapper[5070]: I1213 04:19:02.910945 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3d94d6ec-42cc-4fb6-88e6-9c4ccaa14e77-utilities\") pod \"3d94d6ec-42cc-4fb6-88e6-9c4ccaa14e77\" (UID: \"3d94d6ec-42cc-4fb6-88e6-9c4ccaa14e77\") " Dec 13 04:19:02 crc kubenswrapper[5070]: I1213 04:19:02.911845 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3d94d6ec-42cc-4fb6-88e6-9c4ccaa14e77-catalog-content\") pod \"3d94d6ec-42cc-4fb6-88e6-9c4ccaa14e77\" (UID: \"3d94d6ec-42cc-4fb6-88e6-9c4ccaa14e77\") " Dec 13 04:19:02 crc kubenswrapper[5070]: I1213 04:19:02.912020 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3d94d6ec-42cc-4fb6-88e6-9c4ccaa14e77-utilities" (OuterVolumeSpecName: "utilities") pod "3d94d6ec-42cc-4fb6-88e6-9c4ccaa14e77" (UID: "3d94d6ec-42cc-4fb6-88e6-9c4ccaa14e77"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 04:19:02 crc kubenswrapper[5070]: I1213 04:19:02.916376 5070 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3d94d6ec-42cc-4fb6-88e6-9c4ccaa14e77-utilities\") on node \"crc\" DevicePath \"\"" Dec 13 04:19:02 crc kubenswrapper[5070]: I1213 04:19:02.918713 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3d94d6ec-42cc-4fb6-88e6-9c4ccaa14e77-kube-api-access-767dv" (OuterVolumeSpecName: "kube-api-access-767dv") pod "3d94d6ec-42cc-4fb6-88e6-9c4ccaa14e77" (UID: "3d94d6ec-42cc-4fb6-88e6-9c4ccaa14e77"). InnerVolumeSpecName "kube-api-access-767dv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 04:19:03 crc kubenswrapper[5070]: I1213 04:19:03.018750 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-767dv\" (UniqueName: \"kubernetes.io/projected/3d94d6ec-42cc-4fb6-88e6-9c4ccaa14e77-kube-api-access-767dv\") on node \"crc\" DevicePath \"\"" Dec 13 04:19:03 crc kubenswrapper[5070]: I1213 04:19:03.029938 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3d94d6ec-42cc-4fb6-88e6-9c4ccaa14e77-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3d94d6ec-42cc-4fb6-88e6-9c4ccaa14e77" (UID: "3d94d6ec-42cc-4fb6-88e6-9c4ccaa14e77"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 04:19:03 crc kubenswrapper[5070]: I1213 04:19:03.121295 5070 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3d94d6ec-42cc-4fb6-88e6-9c4ccaa14e77-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 13 04:19:03 crc kubenswrapper[5070]: I1213 04:19:03.210086 5070 generic.go:334] "Generic (PLEG): container finished" podID="3d94d6ec-42cc-4fb6-88e6-9c4ccaa14e77" containerID="9063ec948e1954235840ba31fb8fa04ac83cf36a4df990abab166aedf01d222b" exitCode=0 Dec 13 04:19:03 crc kubenswrapper[5070]: I1213 04:19:03.210153 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-bg9lw" Dec 13 04:19:03 crc kubenswrapper[5070]: I1213 04:19:03.210195 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bg9lw" event={"ID":"3d94d6ec-42cc-4fb6-88e6-9c4ccaa14e77","Type":"ContainerDied","Data":"9063ec948e1954235840ba31fb8fa04ac83cf36a4df990abab166aedf01d222b"} Dec 13 04:19:03 crc kubenswrapper[5070]: I1213 04:19:03.211728 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bg9lw" event={"ID":"3d94d6ec-42cc-4fb6-88e6-9c4ccaa14e77","Type":"ContainerDied","Data":"9de5b4c70dd01bb2a16d375597f7668a5cfd30c63e47e840db98da755898a6a5"} Dec 13 04:19:03 crc kubenswrapper[5070]: I1213 04:19:03.211764 5070 scope.go:117] "RemoveContainer" containerID="9063ec948e1954235840ba31fb8fa04ac83cf36a4df990abab166aedf01d222b" Dec 13 04:19:03 crc kubenswrapper[5070]: I1213 04:19:03.236338 5070 scope.go:117] "RemoveContainer" containerID="ba1e86329b9fdf1ec1ad5b951cc448c5755da17fd3385acc9ba0adfb9dabfdc7" Dec 13 04:19:03 crc kubenswrapper[5070]: I1213 04:19:03.257582 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-bg9lw"] Dec 13 04:19:03 crc kubenswrapper[5070]: I1213 04:19:03.266913 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-bg9lw"] Dec 13 04:19:03 crc kubenswrapper[5070]: I1213 04:19:03.271232 5070 scope.go:117] "RemoveContainer" containerID="7cf0216e3c2c051c487e03b495ac9f279811bf4a3dda019befd3a511e2e7339f" Dec 13 04:19:03 crc kubenswrapper[5070]: I1213 04:19:03.306071 5070 scope.go:117] "RemoveContainer" containerID="9063ec948e1954235840ba31fb8fa04ac83cf36a4df990abab166aedf01d222b" Dec 13 04:19:03 crc kubenswrapper[5070]: E1213 04:19:03.306584 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9063ec948e1954235840ba31fb8fa04ac83cf36a4df990abab166aedf01d222b\": container with ID starting with 9063ec948e1954235840ba31fb8fa04ac83cf36a4df990abab166aedf01d222b not found: ID does not exist" containerID="9063ec948e1954235840ba31fb8fa04ac83cf36a4df990abab166aedf01d222b" Dec 13 04:19:03 crc kubenswrapper[5070]: I1213 04:19:03.306640 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9063ec948e1954235840ba31fb8fa04ac83cf36a4df990abab166aedf01d222b"} err="failed to get container status \"9063ec948e1954235840ba31fb8fa04ac83cf36a4df990abab166aedf01d222b\": rpc error: code = NotFound desc = could not find container \"9063ec948e1954235840ba31fb8fa04ac83cf36a4df990abab166aedf01d222b\": container with ID starting with 9063ec948e1954235840ba31fb8fa04ac83cf36a4df990abab166aedf01d222b not found: ID does not exist" Dec 13 04:19:03 crc 
kubenswrapper[5070]: I1213 04:19:03.306675 5070 scope.go:117] "RemoveContainer" containerID="ba1e86329b9fdf1ec1ad5b951cc448c5755da17fd3385acc9ba0adfb9dabfdc7" Dec 13 04:19:03 crc kubenswrapper[5070]: E1213 04:19:03.307162 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ba1e86329b9fdf1ec1ad5b951cc448c5755da17fd3385acc9ba0adfb9dabfdc7\": container with ID starting with ba1e86329b9fdf1ec1ad5b951cc448c5755da17fd3385acc9ba0adfb9dabfdc7 not found: ID does not exist" containerID="ba1e86329b9fdf1ec1ad5b951cc448c5755da17fd3385acc9ba0adfb9dabfdc7" Dec 13 04:19:03 crc kubenswrapper[5070]: I1213 04:19:03.307204 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ba1e86329b9fdf1ec1ad5b951cc448c5755da17fd3385acc9ba0adfb9dabfdc7"} err="failed to get container status \"ba1e86329b9fdf1ec1ad5b951cc448c5755da17fd3385acc9ba0adfb9dabfdc7\": rpc error: code = NotFound desc = could not find container \"ba1e86329b9fdf1ec1ad5b951cc448c5755da17fd3385acc9ba0adfb9dabfdc7\": container with ID starting with ba1e86329b9fdf1ec1ad5b951cc448c5755da17fd3385acc9ba0adfb9dabfdc7 not found: ID does not exist" Dec 13 04:19:03 crc kubenswrapper[5070]: I1213 04:19:03.307236 5070 scope.go:117] "RemoveContainer" containerID="7cf0216e3c2c051c487e03b495ac9f279811bf4a3dda019befd3a511e2e7339f" Dec 13 04:19:03 crc kubenswrapper[5070]: E1213 04:19:03.307812 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7cf0216e3c2c051c487e03b495ac9f279811bf4a3dda019befd3a511e2e7339f\": container with ID starting with 7cf0216e3c2c051c487e03b495ac9f279811bf4a3dda019befd3a511e2e7339f not found: ID does not exist" containerID="7cf0216e3c2c051c487e03b495ac9f279811bf4a3dda019befd3a511e2e7339f" Dec 13 04:19:03 crc kubenswrapper[5070]: I1213 04:19:03.307843 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7cf0216e3c2c051c487e03b495ac9f279811bf4a3dda019befd3a511e2e7339f"} err="failed to get container status \"7cf0216e3c2c051c487e03b495ac9f279811bf4a3dda019befd3a511e2e7339f\": rpc error: code = NotFound desc = could not find container \"7cf0216e3c2c051c487e03b495ac9f279811bf4a3dda019befd3a511e2e7339f\": container with ID starting with 7cf0216e3c2c051c487e03b495ac9f279811bf4a3dda019befd3a511e2e7339f not found: ID does not exist" Dec 13 04:19:04 crc kubenswrapper[5070]: I1213 04:19:04.179963 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3d94d6ec-42cc-4fb6-88e6-9c4ccaa14e77" path="/var/lib/kubelet/pods/3d94d6ec-42cc-4fb6-88e6-9c4ccaa14e77/volumes" Dec 13 04:19:12 crc kubenswrapper[5070]: I1213 04:19:12.167566 5070 scope.go:117] "RemoveContainer" containerID="0203f7d9003f44d1170bf88814ca0731b8efa6e7f302533498552dad96e07813" Dec 13 04:19:12 crc kubenswrapper[5070]: E1213 04:19:12.168535 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 04:19:27 crc kubenswrapper[5070]: I1213 04:19:27.167890 5070 scope.go:117] "RemoveContainer" containerID="0203f7d9003f44d1170bf88814ca0731b8efa6e7f302533498552dad96e07813" 
Dec 13 04:19:27 crc kubenswrapper[5070]: E1213 04:19:27.168670 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 04:19:39 crc kubenswrapper[5070]: I1213 04:19:39.167738 5070 scope.go:117] "RemoveContainer" containerID="0203f7d9003f44d1170bf88814ca0731b8efa6e7f302533498552dad96e07813" Dec 13 04:19:39 crc kubenswrapper[5070]: E1213 04:19:39.168805 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 04:19:51 crc kubenswrapper[5070]: I1213 04:19:51.167669 5070 scope.go:117] "RemoveContainer" containerID="0203f7d9003f44d1170bf88814ca0731b8efa6e7f302533498552dad96e07813" Dec 13 04:19:51 crc kubenswrapper[5070]: E1213 04:19:51.168507 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 04:20:03 crc kubenswrapper[5070]: I1213 04:20:03.167376 5070 scope.go:117] "RemoveContainer" containerID="0203f7d9003f44d1170bf88814ca0731b8efa6e7f302533498552dad96e07813" Dec 13 04:20:03 crc kubenswrapper[5070]: E1213 04:20:03.168269 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 04:20:17 crc kubenswrapper[5070]: I1213 04:20:17.166855 5070 scope.go:117] "RemoveContainer" containerID="0203f7d9003f44d1170bf88814ca0731b8efa6e7f302533498552dad96e07813" Dec 13 04:20:17 crc kubenswrapper[5070]: E1213 04:20:17.167806 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 04:20:31 crc kubenswrapper[5070]: I1213 04:20:31.167461 5070 scope.go:117] "RemoveContainer" containerID="0203f7d9003f44d1170bf88814ca0731b8efa6e7f302533498552dad96e07813" Dec 13 04:20:31 crc kubenswrapper[5070]: E1213 04:20:31.168240 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with 
CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 04:20:42 crc kubenswrapper[5070]: I1213 04:20:42.170580 5070 scope.go:117] "RemoveContainer" containerID="0203f7d9003f44d1170bf88814ca0731b8efa6e7f302533498552dad96e07813" Dec 13 04:20:42 crc kubenswrapper[5070]: E1213 04:20:42.171482 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 04:20:54 crc kubenswrapper[5070]: I1213 04:20:54.166746 5070 scope.go:117] "RemoveContainer" containerID="0203f7d9003f44d1170bf88814ca0731b8efa6e7f302533498552dad96e07813" Dec 13 04:20:54 crc kubenswrapper[5070]: E1213 04:20:54.167504 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 04:21:09 crc kubenswrapper[5070]: I1213 04:21:09.167357 5070 scope.go:117] "RemoveContainer" containerID="0203f7d9003f44d1170bf88814ca0731b8efa6e7f302533498552dad96e07813" Dec 13 04:21:09 crc kubenswrapper[5070]: E1213 04:21:09.169520 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 04:21:23 crc kubenswrapper[5070]: I1213 04:21:23.167757 5070 scope.go:117] "RemoveContainer" containerID="0203f7d9003f44d1170bf88814ca0731b8efa6e7f302533498552dad96e07813" Dec 13 04:21:23 crc kubenswrapper[5070]: I1213 04:21:23.549229 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" event={"ID":"a2e447c7-5901-414f-af96-69441d4750db","Type":"ContainerStarted","Data":"5c4c80ce18489e1afdd757bbada130f5afc77f20232af565446644cfcfe4e564"} Dec 13 04:22:54 crc kubenswrapper[5070]: I1213 04:22:54.507873 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-drrns"] Dec 13 04:22:54 crc kubenswrapper[5070]: E1213 04:22:54.508783 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3d94d6ec-42cc-4fb6-88e6-9c4ccaa14e77" containerName="extract-utilities" Dec 13 04:22:54 crc kubenswrapper[5070]: I1213 04:22:54.508796 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="3d94d6ec-42cc-4fb6-88e6-9c4ccaa14e77" containerName="extract-utilities" Dec 13 04:22:54 crc kubenswrapper[5070]: E1213 04:22:54.508825 5070 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="3d94d6ec-42cc-4fb6-88e6-9c4ccaa14e77" containerName="registry-server" Dec 13 04:22:54 crc kubenswrapper[5070]: I1213 04:22:54.508831 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="3d94d6ec-42cc-4fb6-88e6-9c4ccaa14e77" containerName="registry-server" Dec 13 04:22:54 crc kubenswrapper[5070]: E1213 04:22:54.508874 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3d94d6ec-42cc-4fb6-88e6-9c4ccaa14e77" containerName="extract-content" Dec 13 04:22:54 crc kubenswrapper[5070]: I1213 04:22:54.508881 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="3d94d6ec-42cc-4fb6-88e6-9c4ccaa14e77" containerName="extract-content" Dec 13 04:22:54 crc kubenswrapper[5070]: I1213 04:22:54.509085 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="3d94d6ec-42cc-4fb6-88e6-9c4ccaa14e77" containerName="registry-server" Dec 13 04:22:54 crc kubenswrapper[5070]: I1213 04:22:54.510952 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-drrns" Dec 13 04:22:54 crc kubenswrapper[5070]: I1213 04:22:54.535587 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-drrns"] Dec 13 04:22:54 crc kubenswrapper[5070]: I1213 04:22:54.583725 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/14537583-3b9e-43c8-8b88-5f30c2c4358b-utilities\") pod \"certified-operators-drrns\" (UID: \"14537583-3b9e-43c8-8b88-5f30c2c4358b\") " pod="openshift-marketplace/certified-operators-drrns" Dec 13 04:22:54 crc kubenswrapper[5070]: I1213 04:22:54.583787 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7w9db\" (UniqueName: \"kubernetes.io/projected/14537583-3b9e-43c8-8b88-5f30c2c4358b-kube-api-access-7w9db\") pod \"certified-operators-drrns\" (UID: \"14537583-3b9e-43c8-8b88-5f30c2c4358b\") " pod="openshift-marketplace/certified-operators-drrns" Dec 13 04:22:54 crc kubenswrapper[5070]: I1213 04:22:54.583815 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/14537583-3b9e-43c8-8b88-5f30c2c4358b-catalog-content\") pod \"certified-operators-drrns\" (UID: \"14537583-3b9e-43c8-8b88-5f30c2c4358b\") " pod="openshift-marketplace/certified-operators-drrns" Dec 13 04:22:54 crc kubenswrapper[5070]: I1213 04:22:54.686080 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/14537583-3b9e-43c8-8b88-5f30c2c4358b-utilities\") pod \"certified-operators-drrns\" (UID: \"14537583-3b9e-43c8-8b88-5f30c2c4358b\") " pod="openshift-marketplace/certified-operators-drrns" Dec 13 04:22:54 crc kubenswrapper[5070]: I1213 04:22:54.686727 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7w9db\" (UniqueName: \"kubernetes.io/projected/14537583-3b9e-43c8-8b88-5f30c2c4358b-kube-api-access-7w9db\") pod \"certified-operators-drrns\" (UID: \"14537583-3b9e-43c8-8b88-5f30c2c4358b\") " pod="openshift-marketplace/certified-operators-drrns" Dec 13 04:22:54 crc kubenswrapper[5070]: I1213 04:22:54.686897 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/14537583-3b9e-43c8-8b88-5f30c2c4358b-catalog-content\") pod 
\"certified-operators-drrns\" (UID: \"14537583-3b9e-43c8-8b88-5f30c2c4358b\") " pod="openshift-marketplace/certified-operators-drrns" Dec 13 04:22:54 crc kubenswrapper[5070]: I1213 04:22:54.686722 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/14537583-3b9e-43c8-8b88-5f30c2c4358b-utilities\") pod \"certified-operators-drrns\" (UID: \"14537583-3b9e-43c8-8b88-5f30c2c4358b\") " pod="openshift-marketplace/certified-operators-drrns" Dec 13 04:22:54 crc kubenswrapper[5070]: I1213 04:22:54.687317 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/14537583-3b9e-43c8-8b88-5f30c2c4358b-catalog-content\") pod \"certified-operators-drrns\" (UID: \"14537583-3b9e-43c8-8b88-5f30c2c4358b\") " pod="openshift-marketplace/certified-operators-drrns" Dec 13 04:22:54 crc kubenswrapper[5070]: I1213 04:22:54.707784 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7w9db\" (UniqueName: \"kubernetes.io/projected/14537583-3b9e-43c8-8b88-5f30c2c4358b-kube-api-access-7w9db\") pod \"certified-operators-drrns\" (UID: \"14537583-3b9e-43c8-8b88-5f30c2c4358b\") " pod="openshift-marketplace/certified-operators-drrns" Dec 13 04:22:54 crc kubenswrapper[5070]: I1213 04:22:54.831255 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-drrns" Dec 13 04:22:57 crc kubenswrapper[5070]: W1213 04:22:57.920386 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod14537583_3b9e_43c8_8b88_5f30c2c4358b.slice/crio-243801070e4acf75677c621f3471ab0283e2f20220572308db6fb5ac172fb568 WatchSource:0}: Error finding container 243801070e4acf75677c621f3471ab0283e2f20220572308db6fb5ac172fb568: Status 404 returned error can't find the container with id 243801070e4acf75677c621f3471ab0283e2f20220572308db6fb5ac172fb568 Dec 13 04:22:57 crc kubenswrapper[5070]: I1213 04:22:57.927222 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-drrns"] Dec 13 04:22:58 crc kubenswrapper[5070]: I1213 04:22:58.468378 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-drrns" event={"ID":"14537583-3b9e-43c8-8b88-5f30c2c4358b","Type":"ContainerStarted","Data":"243801070e4acf75677c621f3471ab0283e2f20220572308db6fb5ac172fb568"} Dec 13 04:23:01 crc kubenswrapper[5070]: I1213 04:23:01.498296 5070 generic.go:334] "Generic (PLEG): container finished" podID="14537583-3b9e-43c8-8b88-5f30c2c4358b" containerID="03a71b2fcbbf34584d12ac5f61f20ec3666873f609452057603a42d719a37fdd" exitCode=0 Dec 13 04:23:01 crc kubenswrapper[5070]: I1213 04:23:01.498392 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-drrns" event={"ID":"14537583-3b9e-43c8-8b88-5f30c2c4358b","Type":"ContainerDied","Data":"03a71b2fcbbf34584d12ac5f61f20ec3666873f609452057603a42d719a37fdd"} Dec 13 04:23:01 crc kubenswrapper[5070]: I1213 04:23:01.501725 5070 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 13 04:23:02 crc kubenswrapper[5070]: I1213 04:23:02.512527 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-drrns" 
event={"ID":"14537583-3b9e-43c8-8b88-5f30c2c4358b","Type":"ContainerStarted","Data":"9cedde0ef5740e293ea42317bb0e7ac31b62d6b95d813a815af5ca907fd93a86"} Dec 13 04:23:03 crc kubenswrapper[5070]: I1213 04:23:03.522781 5070 generic.go:334] "Generic (PLEG): container finished" podID="14537583-3b9e-43c8-8b88-5f30c2c4358b" containerID="9cedde0ef5740e293ea42317bb0e7ac31b62d6b95d813a815af5ca907fd93a86" exitCode=0 Dec 13 04:23:03 crc kubenswrapper[5070]: I1213 04:23:03.522868 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-drrns" event={"ID":"14537583-3b9e-43c8-8b88-5f30c2c4358b","Type":"ContainerDied","Data":"9cedde0ef5740e293ea42317bb0e7ac31b62d6b95d813a815af5ca907fd93a86"} Dec 13 04:23:06 crc kubenswrapper[5070]: I1213 04:23:06.551581 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-drrns" event={"ID":"14537583-3b9e-43c8-8b88-5f30c2c4358b","Type":"ContainerStarted","Data":"69662b8707eab5d8904b830b18fd5f438eac42e8039794a547df49e47a463eda"} Dec 13 04:23:06 crc kubenswrapper[5070]: I1213 04:23:06.576300 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-drrns" podStartSLOduration=8.724330599 podStartE2EDuration="12.576270457s" podCreationTimestamp="2025-12-13 04:22:54 +0000 UTC" firstStartedPulling="2025-12-13 04:23:01.501427818 +0000 UTC m=+4273.737271374" lastFinishedPulling="2025-12-13 04:23:05.353367676 +0000 UTC m=+4277.589211232" observedRunningTime="2025-12-13 04:23:06.569729099 +0000 UTC m=+4278.805572655" watchObservedRunningTime="2025-12-13 04:23:06.576270457 +0000 UTC m=+4278.812114013" Dec 13 04:23:14 crc kubenswrapper[5070]: I1213 04:23:14.832088 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-drrns" Dec 13 04:23:14 crc kubenswrapper[5070]: I1213 04:23:14.832642 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-drrns" Dec 13 04:23:14 crc kubenswrapper[5070]: I1213 04:23:14.897636 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-drrns" Dec 13 04:23:15 crc kubenswrapper[5070]: I1213 04:23:15.748266 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-drrns" Dec 13 04:23:15 crc kubenswrapper[5070]: I1213 04:23:15.812679 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-drrns"] Dec 13 04:23:17 crc kubenswrapper[5070]: I1213 04:23:17.647841 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-drrns" podUID="14537583-3b9e-43c8-8b88-5f30c2c4358b" containerName="registry-server" containerID="cri-o://69662b8707eab5d8904b830b18fd5f438eac42e8039794a547df49e47a463eda" gracePeriod=2 Dec 13 04:23:18 crc kubenswrapper[5070]: I1213 04:23:18.659345 5070 generic.go:334] "Generic (PLEG): container finished" podID="14537583-3b9e-43c8-8b88-5f30c2c4358b" containerID="69662b8707eab5d8904b830b18fd5f438eac42e8039794a547df49e47a463eda" exitCode=0 Dec 13 04:23:18 crc kubenswrapper[5070]: I1213 04:23:18.659427 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-drrns" 
event={"ID":"14537583-3b9e-43c8-8b88-5f30c2c4358b","Type":"ContainerDied","Data":"69662b8707eab5d8904b830b18fd5f438eac42e8039794a547df49e47a463eda"} Dec 13 04:23:19 crc kubenswrapper[5070]: I1213 04:23:19.396562 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-drrns" Dec 13 04:23:19 crc kubenswrapper[5070]: I1213 04:23:19.426066 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/14537583-3b9e-43c8-8b88-5f30c2c4358b-catalog-content\") pod \"14537583-3b9e-43c8-8b88-5f30c2c4358b\" (UID: \"14537583-3b9e-43c8-8b88-5f30c2c4358b\") " Dec 13 04:23:19 crc kubenswrapper[5070]: I1213 04:23:19.426121 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7w9db\" (UniqueName: \"kubernetes.io/projected/14537583-3b9e-43c8-8b88-5f30c2c4358b-kube-api-access-7w9db\") pod \"14537583-3b9e-43c8-8b88-5f30c2c4358b\" (UID: \"14537583-3b9e-43c8-8b88-5f30c2c4358b\") " Dec 13 04:23:19 crc kubenswrapper[5070]: I1213 04:23:19.426239 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/14537583-3b9e-43c8-8b88-5f30c2c4358b-utilities\") pod \"14537583-3b9e-43c8-8b88-5f30c2c4358b\" (UID: \"14537583-3b9e-43c8-8b88-5f30c2c4358b\") " Dec 13 04:23:19 crc kubenswrapper[5070]: I1213 04:23:19.427582 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/14537583-3b9e-43c8-8b88-5f30c2c4358b-utilities" (OuterVolumeSpecName: "utilities") pod "14537583-3b9e-43c8-8b88-5f30c2c4358b" (UID: "14537583-3b9e-43c8-8b88-5f30c2c4358b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 04:23:19 crc kubenswrapper[5070]: I1213 04:23:19.435419 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/14537583-3b9e-43c8-8b88-5f30c2c4358b-kube-api-access-7w9db" (OuterVolumeSpecName: "kube-api-access-7w9db") pod "14537583-3b9e-43c8-8b88-5f30c2c4358b" (UID: "14537583-3b9e-43c8-8b88-5f30c2c4358b"). InnerVolumeSpecName "kube-api-access-7w9db". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 04:23:19 crc kubenswrapper[5070]: I1213 04:23:19.502087 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/14537583-3b9e-43c8-8b88-5f30c2c4358b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "14537583-3b9e-43c8-8b88-5f30c2c4358b" (UID: "14537583-3b9e-43c8-8b88-5f30c2c4358b"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 04:23:19 crc kubenswrapper[5070]: I1213 04:23:19.528715 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7w9db\" (UniqueName: \"kubernetes.io/projected/14537583-3b9e-43c8-8b88-5f30c2c4358b-kube-api-access-7w9db\") on node \"crc\" DevicePath \"\"" Dec 13 04:23:19 crc kubenswrapper[5070]: I1213 04:23:19.528748 5070 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/14537583-3b9e-43c8-8b88-5f30c2c4358b-utilities\") on node \"crc\" DevicePath \"\"" Dec 13 04:23:19 crc kubenswrapper[5070]: I1213 04:23:19.528759 5070 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/14537583-3b9e-43c8-8b88-5f30c2c4358b-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 13 04:23:19 crc kubenswrapper[5070]: I1213 04:23:19.670792 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-drrns" event={"ID":"14537583-3b9e-43c8-8b88-5f30c2c4358b","Type":"ContainerDied","Data":"243801070e4acf75677c621f3471ab0283e2f20220572308db6fb5ac172fb568"} Dec 13 04:23:19 crc kubenswrapper[5070]: I1213 04:23:19.670859 5070 scope.go:117] "RemoveContainer" containerID="69662b8707eab5d8904b830b18fd5f438eac42e8039794a547df49e47a463eda" Dec 13 04:23:19 crc kubenswrapper[5070]: I1213 04:23:19.670910 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-drrns" Dec 13 04:23:19 crc kubenswrapper[5070]: I1213 04:23:19.702272 5070 scope.go:117] "RemoveContainer" containerID="9cedde0ef5740e293ea42317bb0e7ac31b62d6b95d813a815af5ca907fd93a86" Dec 13 04:23:19 crc kubenswrapper[5070]: I1213 04:23:19.706849 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-drrns"] Dec 13 04:23:19 crc kubenswrapper[5070]: I1213 04:23:19.732000 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-drrns"] Dec 13 04:23:19 crc kubenswrapper[5070]: I1213 04:23:19.733989 5070 scope.go:117] "RemoveContainer" containerID="03a71b2fcbbf34584d12ac5f61f20ec3666873f609452057603a42d719a37fdd" Dec 13 04:23:20 crc kubenswrapper[5070]: I1213 04:23:20.177199 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="14537583-3b9e-43c8-8b88-5f30c2c4358b" path="/var/lib/kubelet/pods/14537583-3b9e-43c8-8b88-5f30c2c4358b/volumes" Dec 13 04:23:51 crc kubenswrapper[5070]: I1213 04:23:51.942987 5070 patch_prober.go:28] interesting pod/machine-config-daemon-9l4rb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 13 04:23:51 crc kubenswrapper[5070]: I1213 04:23:51.943615 5070 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 13 04:24:21 crc kubenswrapper[5070]: I1213 04:24:21.942761 5070 patch_prober.go:28] interesting pod/machine-config-daemon-9l4rb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": 
dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 13 04:24:21 crc kubenswrapper[5070]: I1213 04:24:21.943272 5070 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 13 04:24:51 crc kubenswrapper[5070]: I1213 04:24:51.942465 5070 patch_prober.go:28] interesting pod/machine-config-daemon-9l4rb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 13 04:24:51 crc kubenswrapper[5070]: I1213 04:24:51.943040 5070 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 13 04:24:51 crc kubenswrapper[5070]: I1213 04:24:51.943089 5070 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" Dec 13 04:24:51 crc kubenswrapper[5070]: I1213 04:24:51.944223 5070 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"5c4c80ce18489e1afdd757bbada130f5afc77f20232af565446644cfcfe4e564"} pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 13 04:24:51 crc kubenswrapper[5070]: I1213 04:24:51.944363 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" containerName="machine-config-daemon" containerID="cri-o://5c4c80ce18489e1afdd757bbada130f5afc77f20232af565446644cfcfe4e564" gracePeriod=600 Dec 13 04:24:52 crc kubenswrapper[5070]: I1213 04:24:52.515302 5070 generic.go:334] "Generic (PLEG): container finished" podID="a2e447c7-5901-414f-af96-69441d4750db" containerID="5c4c80ce18489e1afdd757bbada130f5afc77f20232af565446644cfcfe4e564" exitCode=0 Dec 13 04:24:52 crc kubenswrapper[5070]: I1213 04:24:52.515412 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" event={"ID":"a2e447c7-5901-414f-af96-69441d4750db","Type":"ContainerDied","Data":"5c4c80ce18489e1afdd757bbada130f5afc77f20232af565446644cfcfe4e564"} Dec 13 04:24:52 crc kubenswrapper[5070]: I1213 04:24:52.515986 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" event={"ID":"a2e447c7-5901-414f-af96-69441d4750db","Type":"ContainerStarted","Data":"48ff8357a2b3ee4e83f57e2cb975421a99bd1b2855b387a2135e4094c2c1ae0e"} Dec 13 04:24:52 crc kubenswrapper[5070]: I1213 04:24:52.516021 5070 scope.go:117] "RemoveContainer" containerID="0203f7d9003f44d1170bf88814ca0731b8efa6e7f302533498552dad96e07813" Dec 13 04:26:28 crc kubenswrapper[5070]: I1213 04:26:28.638928 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-mwjq8"] Dec 13 04:26:28 crc 
kubenswrapper[5070]: E1213 04:26:28.641890 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="14537583-3b9e-43c8-8b88-5f30c2c4358b" containerName="registry-server" Dec 13 04:26:28 crc kubenswrapper[5070]: I1213 04:26:28.641916 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="14537583-3b9e-43c8-8b88-5f30c2c4358b" containerName="registry-server" Dec 13 04:26:28 crc kubenswrapper[5070]: E1213 04:26:28.641942 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="14537583-3b9e-43c8-8b88-5f30c2c4358b" containerName="extract-utilities" Dec 13 04:26:28 crc kubenswrapper[5070]: I1213 04:26:28.641952 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="14537583-3b9e-43c8-8b88-5f30c2c4358b" containerName="extract-utilities" Dec 13 04:26:28 crc kubenswrapper[5070]: E1213 04:26:28.641981 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="14537583-3b9e-43c8-8b88-5f30c2c4358b" containerName="extract-content" Dec 13 04:26:28 crc kubenswrapper[5070]: I1213 04:26:28.641993 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="14537583-3b9e-43c8-8b88-5f30c2c4358b" containerName="extract-content" Dec 13 04:26:28 crc kubenswrapper[5070]: I1213 04:26:28.642405 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="14537583-3b9e-43c8-8b88-5f30c2c4358b" containerName="registry-server" Dec 13 04:26:28 crc kubenswrapper[5070]: I1213 04:26:28.645952 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-mwjq8" Dec 13 04:26:28 crc kubenswrapper[5070]: I1213 04:26:28.651868 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-mwjq8"] Dec 13 04:26:28 crc kubenswrapper[5070]: I1213 04:26:28.749291 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qfd2p\" (UniqueName: \"kubernetes.io/projected/bfd0656f-58fb-4ee8-a3b2-cc3e19296581-kube-api-access-qfd2p\") pod \"community-operators-mwjq8\" (UID: \"bfd0656f-58fb-4ee8-a3b2-cc3e19296581\") " pod="openshift-marketplace/community-operators-mwjq8" Dec 13 04:26:28 crc kubenswrapper[5070]: I1213 04:26:28.749747 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bfd0656f-58fb-4ee8-a3b2-cc3e19296581-catalog-content\") pod \"community-operators-mwjq8\" (UID: \"bfd0656f-58fb-4ee8-a3b2-cc3e19296581\") " pod="openshift-marketplace/community-operators-mwjq8" Dec 13 04:26:28 crc kubenswrapper[5070]: I1213 04:26:28.749969 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bfd0656f-58fb-4ee8-a3b2-cc3e19296581-utilities\") pod \"community-operators-mwjq8\" (UID: \"bfd0656f-58fb-4ee8-a3b2-cc3e19296581\") " pod="openshift-marketplace/community-operators-mwjq8" Dec 13 04:26:28 crc kubenswrapper[5070]: I1213 04:26:28.851388 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bfd0656f-58fb-4ee8-a3b2-cc3e19296581-utilities\") pod \"community-operators-mwjq8\" (UID: \"bfd0656f-58fb-4ee8-a3b2-cc3e19296581\") " pod="openshift-marketplace/community-operators-mwjq8" Dec 13 04:26:28 crc kubenswrapper[5070]: I1213 04:26:28.851482 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qfd2p\" 
(UniqueName: \"kubernetes.io/projected/bfd0656f-58fb-4ee8-a3b2-cc3e19296581-kube-api-access-qfd2p\") pod \"community-operators-mwjq8\" (UID: \"bfd0656f-58fb-4ee8-a3b2-cc3e19296581\") " pod="openshift-marketplace/community-operators-mwjq8" Dec 13 04:26:28 crc kubenswrapper[5070]: I1213 04:26:28.851547 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bfd0656f-58fb-4ee8-a3b2-cc3e19296581-catalog-content\") pod \"community-operators-mwjq8\" (UID: \"bfd0656f-58fb-4ee8-a3b2-cc3e19296581\") " pod="openshift-marketplace/community-operators-mwjq8" Dec 13 04:26:28 crc kubenswrapper[5070]: I1213 04:26:28.852187 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bfd0656f-58fb-4ee8-a3b2-cc3e19296581-catalog-content\") pod \"community-operators-mwjq8\" (UID: \"bfd0656f-58fb-4ee8-a3b2-cc3e19296581\") " pod="openshift-marketplace/community-operators-mwjq8" Dec 13 04:26:28 crc kubenswrapper[5070]: I1213 04:26:28.852427 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bfd0656f-58fb-4ee8-a3b2-cc3e19296581-utilities\") pod \"community-operators-mwjq8\" (UID: \"bfd0656f-58fb-4ee8-a3b2-cc3e19296581\") " pod="openshift-marketplace/community-operators-mwjq8" Dec 13 04:26:28 crc kubenswrapper[5070]: I1213 04:26:28.883347 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qfd2p\" (UniqueName: \"kubernetes.io/projected/bfd0656f-58fb-4ee8-a3b2-cc3e19296581-kube-api-access-qfd2p\") pod \"community-operators-mwjq8\" (UID: \"bfd0656f-58fb-4ee8-a3b2-cc3e19296581\") " pod="openshift-marketplace/community-operators-mwjq8" Dec 13 04:26:29 crc kubenswrapper[5070]: I1213 04:26:29.002804 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-mwjq8" Dec 13 04:26:29 crc kubenswrapper[5070]: I1213 04:26:29.680030 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-mwjq8"] Dec 13 04:26:30 crc kubenswrapper[5070]: I1213 04:26:30.431085 5070 generic.go:334] "Generic (PLEG): container finished" podID="bfd0656f-58fb-4ee8-a3b2-cc3e19296581" containerID="90a07e45eb60951266f66e996c50ee9f3e43de2cc38d207abde4caed9cb5a438" exitCode=0 Dec 13 04:26:30 crc kubenswrapper[5070]: I1213 04:26:30.431358 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mwjq8" event={"ID":"bfd0656f-58fb-4ee8-a3b2-cc3e19296581","Type":"ContainerDied","Data":"90a07e45eb60951266f66e996c50ee9f3e43de2cc38d207abde4caed9cb5a438"} Dec 13 04:26:30 crc kubenswrapper[5070]: I1213 04:26:30.431433 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mwjq8" event={"ID":"bfd0656f-58fb-4ee8-a3b2-cc3e19296581","Type":"ContainerStarted","Data":"ce87dfc5d6fa99490d1fee54b1d51c41b9d0ff96987c2cbf932708e6150fc4a7"} Dec 13 04:26:31 crc kubenswrapper[5070]: I1213 04:26:31.822663 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-skhkz"] Dec 13 04:26:31 crc kubenswrapper[5070]: I1213 04:26:31.825888 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-skhkz" Dec 13 04:26:31 crc kubenswrapper[5070]: I1213 04:26:31.847533 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-skhkz"] Dec 13 04:26:31 crc kubenswrapper[5070]: I1213 04:26:31.912080 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/808bdec5-64c8-40c7-800b-b5089ecf5601-utilities\") pod \"redhat-marketplace-skhkz\" (UID: \"808bdec5-64c8-40c7-800b-b5089ecf5601\") " pod="openshift-marketplace/redhat-marketplace-skhkz" Dec 13 04:26:31 crc kubenswrapper[5070]: I1213 04:26:31.912162 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zdz2q\" (UniqueName: \"kubernetes.io/projected/808bdec5-64c8-40c7-800b-b5089ecf5601-kube-api-access-zdz2q\") pod \"redhat-marketplace-skhkz\" (UID: \"808bdec5-64c8-40c7-800b-b5089ecf5601\") " pod="openshift-marketplace/redhat-marketplace-skhkz" Dec 13 04:26:31 crc kubenswrapper[5070]: I1213 04:26:31.912214 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/808bdec5-64c8-40c7-800b-b5089ecf5601-catalog-content\") pod \"redhat-marketplace-skhkz\" (UID: \"808bdec5-64c8-40c7-800b-b5089ecf5601\") " pod="openshift-marketplace/redhat-marketplace-skhkz" Dec 13 04:26:32 crc kubenswrapper[5070]: I1213 04:26:32.013670 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zdz2q\" (UniqueName: \"kubernetes.io/projected/808bdec5-64c8-40c7-800b-b5089ecf5601-kube-api-access-zdz2q\") pod \"redhat-marketplace-skhkz\" (UID: \"808bdec5-64c8-40c7-800b-b5089ecf5601\") " pod="openshift-marketplace/redhat-marketplace-skhkz" Dec 13 04:26:32 crc kubenswrapper[5070]: I1213 04:26:32.013998 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/808bdec5-64c8-40c7-800b-b5089ecf5601-catalog-content\") pod \"redhat-marketplace-skhkz\" (UID: \"808bdec5-64c8-40c7-800b-b5089ecf5601\") " pod="openshift-marketplace/redhat-marketplace-skhkz" Dec 13 04:26:32 crc kubenswrapper[5070]: I1213 04:26:32.014229 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/808bdec5-64c8-40c7-800b-b5089ecf5601-utilities\") pod \"redhat-marketplace-skhkz\" (UID: \"808bdec5-64c8-40c7-800b-b5089ecf5601\") " pod="openshift-marketplace/redhat-marketplace-skhkz" Dec 13 04:26:32 crc kubenswrapper[5070]: I1213 04:26:32.014663 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/808bdec5-64c8-40c7-800b-b5089ecf5601-catalog-content\") pod \"redhat-marketplace-skhkz\" (UID: \"808bdec5-64c8-40c7-800b-b5089ecf5601\") " pod="openshift-marketplace/redhat-marketplace-skhkz" Dec 13 04:26:32 crc kubenswrapper[5070]: I1213 04:26:32.014663 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/808bdec5-64c8-40c7-800b-b5089ecf5601-utilities\") pod \"redhat-marketplace-skhkz\" (UID: \"808bdec5-64c8-40c7-800b-b5089ecf5601\") " pod="openshift-marketplace/redhat-marketplace-skhkz" Dec 13 04:26:32 crc kubenswrapper[5070]: I1213 04:26:32.047291 5070 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-zdz2q\" (UniqueName: \"kubernetes.io/projected/808bdec5-64c8-40c7-800b-b5089ecf5601-kube-api-access-zdz2q\") pod \"redhat-marketplace-skhkz\" (UID: \"808bdec5-64c8-40c7-800b-b5089ecf5601\") " pod="openshift-marketplace/redhat-marketplace-skhkz" Dec 13 04:26:32 crc kubenswrapper[5070]: I1213 04:26:32.157913 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-skhkz" Dec 13 04:26:32 crc kubenswrapper[5070]: I1213 04:26:32.453434 5070 generic.go:334] "Generic (PLEG): container finished" podID="bfd0656f-58fb-4ee8-a3b2-cc3e19296581" containerID="df208770ed76bf35148baee992e18e02d0971674f3bccd802ec0fcd0062178bc" exitCode=0 Dec 13 04:26:32 crc kubenswrapper[5070]: I1213 04:26:32.453499 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mwjq8" event={"ID":"bfd0656f-58fb-4ee8-a3b2-cc3e19296581","Type":"ContainerDied","Data":"df208770ed76bf35148baee992e18e02d0971674f3bccd802ec0fcd0062178bc"} Dec 13 04:26:32 crc kubenswrapper[5070]: I1213 04:26:32.687480 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-skhkz"] Dec 13 04:26:33 crc kubenswrapper[5070]: I1213 04:26:33.463512 5070 generic.go:334] "Generic (PLEG): container finished" podID="808bdec5-64c8-40c7-800b-b5089ecf5601" containerID="54123e80d91e805f27bdaaf5d8e1095d9852b602db45048a01af5ac3991ca8f0" exitCode=0 Dec 13 04:26:33 crc kubenswrapper[5070]: I1213 04:26:33.463575 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-skhkz" event={"ID":"808bdec5-64c8-40c7-800b-b5089ecf5601","Type":"ContainerDied","Data":"54123e80d91e805f27bdaaf5d8e1095d9852b602db45048a01af5ac3991ca8f0"} Dec 13 04:26:33 crc kubenswrapper[5070]: I1213 04:26:33.463876 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-skhkz" event={"ID":"808bdec5-64c8-40c7-800b-b5089ecf5601","Type":"ContainerStarted","Data":"3c42960af13b67c11f772b77bede468c80153311e1552560843b48f0657c848a"} Dec 13 04:26:33 crc kubenswrapper[5070]: I1213 04:26:33.467557 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mwjq8" event={"ID":"bfd0656f-58fb-4ee8-a3b2-cc3e19296581","Type":"ContainerStarted","Data":"65f7cc0a85d430c1ef5129b33fff4ac4f54bbc6f99a72b51c76b1748bf5e1ec8"} Dec 13 04:26:33 crc kubenswrapper[5070]: I1213 04:26:33.503467 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-mwjq8" podStartSLOduration=2.939295777 podStartE2EDuration="5.503428296s" podCreationTimestamp="2025-12-13 04:26:28 +0000 UTC" firstStartedPulling="2025-12-13 04:26:30.433469347 +0000 UTC m=+4482.669312913" lastFinishedPulling="2025-12-13 04:26:32.997601876 +0000 UTC m=+4485.233445432" observedRunningTime="2025-12-13 04:26:33.500396324 +0000 UTC m=+4485.736239870" watchObservedRunningTime="2025-12-13 04:26:33.503428296 +0000 UTC m=+4485.739271852" Dec 13 04:26:34 crc kubenswrapper[5070]: I1213 04:26:34.484205 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-skhkz" event={"ID":"808bdec5-64c8-40c7-800b-b5089ecf5601","Type":"ContainerStarted","Data":"90a07418bf094740353cb7ad3825b04d8612b2182c764a3a1025a0dd673d0f7a"} Dec 13 04:26:35 crc kubenswrapper[5070]: I1213 04:26:35.493173 5070 generic.go:334] "Generic (PLEG): container finished" 
podID="808bdec5-64c8-40c7-800b-b5089ecf5601" containerID="90a07418bf094740353cb7ad3825b04d8612b2182c764a3a1025a0dd673d0f7a" exitCode=0 Dec 13 04:26:35 crc kubenswrapper[5070]: I1213 04:26:35.493360 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-skhkz" event={"ID":"808bdec5-64c8-40c7-800b-b5089ecf5601","Type":"ContainerDied","Data":"90a07418bf094740353cb7ad3825b04d8612b2182c764a3a1025a0dd673d0f7a"} Dec 13 04:26:36 crc kubenswrapper[5070]: I1213 04:26:36.507899 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-skhkz" event={"ID":"808bdec5-64c8-40c7-800b-b5089ecf5601","Type":"ContainerStarted","Data":"7df6a9f621d283db2d9983da7573626a705402b520e4a78e119c1fcecbd0ae47"} Dec 13 04:26:36 crc kubenswrapper[5070]: I1213 04:26:36.531835 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-skhkz" podStartSLOduration=3.065844671 podStartE2EDuration="5.531810316s" podCreationTimestamp="2025-12-13 04:26:31 +0000 UTC" firstStartedPulling="2025-12-13 04:26:33.465176647 +0000 UTC m=+4485.701020193" lastFinishedPulling="2025-12-13 04:26:35.931142292 +0000 UTC m=+4488.166985838" observedRunningTime="2025-12-13 04:26:36.525078894 +0000 UTC m=+4488.760922440" watchObservedRunningTime="2025-12-13 04:26:36.531810316 +0000 UTC m=+4488.767653862" Dec 13 04:26:39 crc kubenswrapper[5070]: I1213 04:26:39.003049 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-mwjq8" Dec 13 04:26:39 crc kubenswrapper[5070]: I1213 04:26:39.003392 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-mwjq8" Dec 13 04:26:39 crc kubenswrapper[5070]: I1213 04:26:39.053559 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-mwjq8" Dec 13 04:26:39 crc kubenswrapper[5070]: I1213 04:26:39.616402 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-mwjq8" Dec 13 04:26:40 crc kubenswrapper[5070]: I1213 04:26:40.397529 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-mwjq8"] Dec 13 04:26:41 crc kubenswrapper[5070]: I1213 04:26:41.556956 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-mwjq8" podUID="bfd0656f-58fb-4ee8-a3b2-cc3e19296581" containerName="registry-server" containerID="cri-o://65f7cc0a85d430c1ef5129b33fff4ac4f54bbc6f99a72b51c76b1748bf5e1ec8" gracePeriod=2 Dec 13 04:26:42 crc kubenswrapper[5070]: I1213 04:26:42.158397 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-skhkz" Dec 13 04:26:42 crc kubenswrapper[5070]: I1213 04:26:42.159099 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-skhkz" Dec 13 04:26:42 crc kubenswrapper[5070]: I1213 04:26:42.235356 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-skhkz" Dec 13 04:26:42 crc kubenswrapper[5070]: I1213 04:26:42.440399 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-mwjq8" Dec 13 04:26:42 crc kubenswrapper[5070]: I1213 04:26:42.489381 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bfd0656f-58fb-4ee8-a3b2-cc3e19296581-catalog-content\") pod \"bfd0656f-58fb-4ee8-a3b2-cc3e19296581\" (UID: \"bfd0656f-58fb-4ee8-a3b2-cc3e19296581\") " Dec 13 04:26:42 crc kubenswrapper[5070]: I1213 04:26:42.489432 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qfd2p\" (UniqueName: \"kubernetes.io/projected/bfd0656f-58fb-4ee8-a3b2-cc3e19296581-kube-api-access-qfd2p\") pod \"bfd0656f-58fb-4ee8-a3b2-cc3e19296581\" (UID: \"bfd0656f-58fb-4ee8-a3b2-cc3e19296581\") " Dec 13 04:26:42 crc kubenswrapper[5070]: I1213 04:26:42.489473 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bfd0656f-58fb-4ee8-a3b2-cc3e19296581-utilities\") pod \"bfd0656f-58fb-4ee8-a3b2-cc3e19296581\" (UID: \"bfd0656f-58fb-4ee8-a3b2-cc3e19296581\") " Dec 13 04:26:42 crc kubenswrapper[5070]: I1213 04:26:42.490741 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bfd0656f-58fb-4ee8-a3b2-cc3e19296581-utilities" (OuterVolumeSpecName: "utilities") pod "bfd0656f-58fb-4ee8-a3b2-cc3e19296581" (UID: "bfd0656f-58fb-4ee8-a3b2-cc3e19296581"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 04:26:42 crc kubenswrapper[5070]: I1213 04:26:42.505083 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bfd0656f-58fb-4ee8-a3b2-cc3e19296581-kube-api-access-qfd2p" (OuterVolumeSpecName: "kube-api-access-qfd2p") pod "bfd0656f-58fb-4ee8-a3b2-cc3e19296581" (UID: "bfd0656f-58fb-4ee8-a3b2-cc3e19296581"). InnerVolumeSpecName "kube-api-access-qfd2p". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 04:26:42 crc kubenswrapper[5070]: I1213 04:26:42.551533 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bfd0656f-58fb-4ee8-a3b2-cc3e19296581-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "bfd0656f-58fb-4ee8-a3b2-cc3e19296581" (UID: "bfd0656f-58fb-4ee8-a3b2-cc3e19296581"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 04:26:42 crc kubenswrapper[5070]: I1213 04:26:42.567876 5070 generic.go:334] "Generic (PLEG): container finished" podID="bfd0656f-58fb-4ee8-a3b2-cc3e19296581" containerID="65f7cc0a85d430c1ef5129b33fff4ac4f54bbc6f99a72b51c76b1748bf5e1ec8" exitCode=0 Dec 13 04:26:42 crc kubenswrapper[5070]: I1213 04:26:42.567988 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-mwjq8" Dec 13 04:26:42 crc kubenswrapper[5070]: I1213 04:26:42.567972 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mwjq8" event={"ID":"bfd0656f-58fb-4ee8-a3b2-cc3e19296581","Type":"ContainerDied","Data":"65f7cc0a85d430c1ef5129b33fff4ac4f54bbc6f99a72b51c76b1748bf5e1ec8"} Dec 13 04:26:42 crc kubenswrapper[5070]: I1213 04:26:42.569461 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mwjq8" event={"ID":"bfd0656f-58fb-4ee8-a3b2-cc3e19296581","Type":"ContainerDied","Data":"ce87dfc5d6fa99490d1fee54b1d51c41b9d0ff96987c2cbf932708e6150fc4a7"} Dec 13 04:26:42 crc kubenswrapper[5070]: I1213 04:26:42.569490 5070 scope.go:117] "RemoveContainer" containerID="65f7cc0a85d430c1ef5129b33fff4ac4f54bbc6f99a72b51c76b1748bf5e1ec8" Dec 13 04:26:42 crc kubenswrapper[5070]: I1213 04:26:42.592363 5070 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bfd0656f-58fb-4ee8-a3b2-cc3e19296581-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 13 04:26:42 crc kubenswrapper[5070]: I1213 04:26:42.592412 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qfd2p\" (UniqueName: \"kubernetes.io/projected/bfd0656f-58fb-4ee8-a3b2-cc3e19296581-kube-api-access-qfd2p\") on node \"crc\" DevicePath \"\"" Dec 13 04:26:42 crc kubenswrapper[5070]: I1213 04:26:42.592423 5070 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bfd0656f-58fb-4ee8-a3b2-cc3e19296581-utilities\") on node \"crc\" DevicePath \"\"" Dec 13 04:26:42 crc kubenswrapper[5070]: I1213 04:26:42.595510 5070 scope.go:117] "RemoveContainer" containerID="df208770ed76bf35148baee992e18e02d0971674f3bccd802ec0fcd0062178bc" Dec 13 04:26:42 crc kubenswrapper[5070]: I1213 04:26:42.612434 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-mwjq8"] Dec 13 04:26:42 crc kubenswrapper[5070]: I1213 04:26:42.622358 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-mwjq8"] Dec 13 04:26:42 crc kubenswrapper[5070]: I1213 04:26:42.636252 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-skhkz" Dec 13 04:26:42 crc kubenswrapper[5070]: I1213 04:26:42.646613 5070 scope.go:117] "RemoveContainer" containerID="90a07e45eb60951266f66e996c50ee9f3e43de2cc38d207abde4caed9cb5a438" Dec 13 04:26:42 crc kubenswrapper[5070]: I1213 04:26:42.690720 5070 scope.go:117] "RemoveContainer" containerID="65f7cc0a85d430c1ef5129b33fff4ac4f54bbc6f99a72b51c76b1748bf5e1ec8" Dec 13 04:26:42 crc kubenswrapper[5070]: E1213 04:26:42.691187 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"65f7cc0a85d430c1ef5129b33fff4ac4f54bbc6f99a72b51c76b1748bf5e1ec8\": container with ID starting with 65f7cc0a85d430c1ef5129b33fff4ac4f54bbc6f99a72b51c76b1748bf5e1ec8 not found: ID does not exist" containerID="65f7cc0a85d430c1ef5129b33fff4ac4f54bbc6f99a72b51c76b1748bf5e1ec8" Dec 13 04:26:42 crc kubenswrapper[5070]: I1213 04:26:42.691234 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"65f7cc0a85d430c1ef5129b33fff4ac4f54bbc6f99a72b51c76b1748bf5e1ec8"} err="failed to get container status 
\"65f7cc0a85d430c1ef5129b33fff4ac4f54bbc6f99a72b51c76b1748bf5e1ec8\": rpc error: code = NotFound desc = could not find container \"65f7cc0a85d430c1ef5129b33fff4ac4f54bbc6f99a72b51c76b1748bf5e1ec8\": container with ID starting with 65f7cc0a85d430c1ef5129b33fff4ac4f54bbc6f99a72b51c76b1748bf5e1ec8 not found: ID does not exist" Dec 13 04:26:42 crc kubenswrapper[5070]: I1213 04:26:42.691292 5070 scope.go:117] "RemoveContainer" containerID="df208770ed76bf35148baee992e18e02d0971674f3bccd802ec0fcd0062178bc" Dec 13 04:26:42 crc kubenswrapper[5070]: E1213 04:26:42.691832 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"df208770ed76bf35148baee992e18e02d0971674f3bccd802ec0fcd0062178bc\": container with ID starting with df208770ed76bf35148baee992e18e02d0971674f3bccd802ec0fcd0062178bc not found: ID does not exist" containerID="df208770ed76bf35148baee992e18e02d0971674f3bccd802ec0fcd0062178bc" Dec 13 04:26:42 crc kubenswrapper[5070]: I1213 04:26:42.691901 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"df208770ed76bf35148baee992e18e02d0971674f3bccd802ec0fcd0062178bc"} err="failed to get container status \"df208770ed76bf35148baee992e18e02d0971674f3bccd802ec0fcd0062178bc\": rpc error: code = NotFound desc = could not find container \"df208770ed76bf35148baee992e18e02d0971674f3bccd802ec0fcd0062178bc\": container with ID starting with df208770ed76bf35148baee992e18e02d0971674f3bccd802ec0fcd0062178bc not found: ID does not exist" Dec 13 04:26:42 crc kubenswrapper[5070]: I1213 04:26:42.691951 5070 scope.go:117] "RemoveContainer" containerID="90a07e45eb60951266f66e996c50ee9f3e43de2cc38d207abde4caed9cb5a438" Dec 13 04:26:42 crc kubenswrapper[5070]: E1213 04:26:42.692326 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"90a07e45eb60951266f66e996c50ee9f3e43de2cc38d207abde4caed9cb5a438\": container with ID starting with 90a07e45eb60951266f66e996c50ee9f3e43de2cc38d207abde4caed9cb5a438 not found: ID does not exist" containerID="90a07e45eb60951266f66e996c50ee9f3e43de2cc38d207abde4caed9cb5a438" Dec 13 04:26:42 crc kubenswrapper[5070]: I1213 04:26:42.692364 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"90a07e45eb60951266f66e996c50ee9f3e43de2cc38d207abde4caed9cb5a438"} err="failed to get container status \"90a07e45eb60951266f66e996c50ee9f3e43de2cc38d207abde4caed9cb5a438\": rpc error: code = NotFound desc = could not find container \"90a07e45eb60951266f66e996c50ee9f3e43de2cc38d207abde4caed9cb5a438\": container with ID starting with 90a07e45eb60951266f66e996c50ee9f3e43de2cc38d207abde4caed9cb5a438 not found: ID does not exist" Dec 13 04:26:42 crc kubenswrapper[5070]: E1213 04:26:42.793712 5070 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbfd0656f_58fb_4ee8_a3b2_cc3e19296581.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbfd0656f_58fb_4ee8_a3b2_cc3e19296581.slice/crio-ce87dfc5d6fa99490d1fee54b1d51c41b9d0ff96987c2cbf932708e6150fc4a7\": RecentStats: unable to find data in memory cache]" Dec 13 04:26:44 crc kubenswrapper[5070]: I1213 04:26:44.179625 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bfd0656f-58fb-4ee8-a3b2-cc3e19296581" 
path="/var/lib/kubelet/pods/bfd0656f-58fb-4ee8-a3b2-cc3e19296581/volumes" Dec 13 04:26:45 crc kubenswrapper[5070]: I1213 04:26:45.006314 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-skhkz"] Dec 13 04:26:45 crc kubenswrapper[5070]: I1213 04:26:45.006743 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-skhkz" podUID="808bdec5-64c8-40c7-800b-b5089ecf5601" containerName="registry-server" containerID="cri-o://7df6a9f621d283db2d9983da7573626a705402b520e4a78e119c1fcecbd0ae47" gracePeriod=2 Dec 13 04:26:45 crc kubenswrapper[5070]: I1213 04:26:45.609796 5070 generic.go:334] "Generic (PLEG): container finished" podID="808bdec5-64c8-40c7-800b-b5089ecf5601" containerID="7df6a9f621d283db2d9983da7573626a705402b520e4a78e119c1fcecbd0ae47" exitCode=0 Dec 13 04:26:45 crc kubenswrapper[5070]: I1213 04:26:45.609875 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-skhkz" event={"ID":"808bdec5-64c8-40c7-800b-b5089ecf5601","Type":"ContainerDied","Data":"7df6a9f621d283db2d9983da7573626a705402b520e4a78e119c1fcecbd0ae47"} Dec 13 04:26:45 crc kubenswrapper[5070]: I1213 04:26:45.610329 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-skhkz" event={"ID":"808bdec5-64c8-40c7-800b-b5089ecf5601","Type":"ContainerDied","Data":"3c42960af13b67c11f772b77bede468c80153311e1552560843b48f0657c848a"} Dec 13 04:26:45 crc kubenswrapper[5070]: I1213 04:26:45.610349 5070 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3c42960af13b67c11f772b77bede468c80153311e1552560843b48f0657c848a" Dec 13 04:26:45 crc kubenswrapper[5070]: I1213 04:26:45.623973 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-skhkz" Dec 13 04:26:45 crc kubenswrapper[5070]: I1213 04:26:45.658697 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zdz2q\" (UniqueName: \"kubernetes.io/projected/808bdec5-64c8-40c7-800b-b5089ecf5601-kube-api-access-zdz2q\") pod \"808bdec5-64c8-40c7-800b-b5089ecf5601\" (UID: \"808bdec5-64c8-40c7-800b-b5089ecf5601\") " Dec 13 04:26:45 crc kubenswrapper[5070]: I1213 04:26:45.658847 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/808bdec5-64c8-40c7-800b-b5089ecf5601-utilities\") pod \"808bdec5-64c8-40c7-800b-b5089ecf5601\" (UID: \"808bdec5-64c8-40c7-800b-b5089ecf5601\") " Dec 13 04:26:45 crc kubenswrapper[5070]: I1213 04:26:45.658928 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/808bdec5-64c8-40c7-800b-b5089ecf5601-catalog-content\") pod \"808bdec5-64c8-40c7-800b-b5089ecf5601\" (UID: \"808bdec5-64c8-40c7-800b-b5089ecf5601\") " Dec 13 04:26:45 crc kubenswrapper[5070]: I1213 04:26:45.664424 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/808bdec5-64c8-40c7-800b-b5089ecf5601-utilities" (OuterVolumeSpecName: "utilities") pod "808bdec5-64c8-40c7-800b-b5089ecf5601" (UID: "808bdec5-64c8-40c7-800b-b5089ecf5601"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 04:26:45 crc kubenswrapper[5070]: I1213 04:26:45.674857 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/808bdec5-64c8-40c7-800b-b5089ecf5601-kube-api-access-zdz2q" (OuterVolumeSpecName: "kube-api-access-zdz2q") pod "808bdec5-64c8-40c7-800b-b5089ecf5601" (UID: "808bdec5-64c8-40c7-800b-b5089ecf5601"). InnerVolumeSpecName "kube-api-access-zdz2q". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 04:26:45 crc kubenswrapper[5070]: I1213 04:26:45.691525 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/808bdec5-64c8-40c7-800b-b5089ecf5601-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "808bdec5-64c8-40c7-800b-b5089ecf5601" (UID: "808bdec5-64c8-40c7-800b-b5089ecf5601"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 04:26:45 crc kubenswrapper[5070]: I1213 04:26:45.761223 5070 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/808bdec5-64c8-40c7-800b-b5089ecf5601-utilities\") on node \"crc\" DevicePath \"\"" Dec 13 04:26:45 crc kubenswrapper[5070]: I1213 04:26:45.761257 5070 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/808bdec5-64c8-40c7-800b-b5089ecf5601-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 13 04:26:45 crc kubenswrapper[5070]: I1213 04:26:45.761294 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zdz2q\" (UniqueName: \"kubernetes.io/projected/808bdec5-64c8-40c7-800b-b5089ecf5601-kube-api-access-zdz2q\") on node \"crc\" DevicePath \"\"" Dec 13 04:26:46 crc kubenswrapper[5070]: I1213 04:26:46.617115 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-skhkz" Dec 13 04:26:46 crc kubenswrapper[5070]: I1213 04:26:46.639419 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-skhkz"] Dec 13 04:26:46 crc kubenswrapper[5070]: I1213 04:26:46.648621 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-skhkz"] Dec 13 04:26:48 crc kubenswrapper[5070]: I1213 04:26:48.184967 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="808bdec5-64c8-40c7-800b-b5089ecf5601" path="/var/lib/kubelet/pods/808bdec5-64c8-40c7-800b-b5089ecf5601/volumes" Dec 13 04:27:21 crc kubenswrapper[5070]: I1213 04:27:21.942517 5070 patch_prober.go:28] interesting pod/machine-config-daemon-9l4rb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 13 04:27:21 crc kubenswrapper[5070]: I1213 04:27:21.942987 5070 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 13 04:27:51 crc kubenswrapper[5070]: I1213 04:27:51.943029 5070 patch_prober.go:28] interesting pod/machine-config-daemon-9l4rb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 13 04:27:51 crc kubenswrapper[5070]: I1213 04:27:51.943624 5070 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 13 04:28:21 crc kubenswrapper[5070]: I1213 04:28:21.942723 5070 patch_prober.go:28] interesting pod/machine-config-daemon-9l4rb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 13 04:28:21 crc kubenswrapper[5070]: I1213 04:28:21.943299 5070 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 13 04:28:21 crc kubenswrapper[5070]: I1213 04:28:21.943355 5070 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" Dec 13 04:28:21 crc kubenswrapper[5070]: I1213 04:28:21.944207 5070 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"48ff8357a2b3ee4e83f57e2cb975421a99bd1b2855b387a2135e4094c2c1ae0e"} pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" 
Dec 13 04:28:21 crc kubenswrapper[5070]: I1213 04:28:21.944256 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" containerName="machine-config-daemon" containerID="cri-o://48ff8357a2b3ee4e83f57e2cb975421a99bd1b2855b387a2135e4094c2c1ae0e" gracePeriod=600 Dec 13 04:28:22 crc kubenswrapper[5070]: E1213 04:28:22.071948 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 04:28:22 crc kubenswrapper[5070]: I1213 04:28:22.573901 5070 generic.go:334] "Generic (PLEG): container finished" podID="a2e447c7-5901-414f-af96-69441d4750db" containerID="48ff8357a2b3ee4e83f57e2cb975421a99bd1b2855b387a2135e4094c2c1ae0e" exitCode=0 Dec 13 04:28:22 crc kubenswrapper[5070]: I1213 04:28:22.573964 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" event={"ID":"a2e447c7-5901-414f-af96-69441d4750db","Type":"ContainerDied","Data":"48ff8357a2b3ee4e83f57e2cb975421a99bd1b2855b387a2135e4094c2c1ae0e"} Dec 13 04:28:22 crc kubenswrapper[5070]: I1213 04:28:22.574015 5070 scope.go:117] "RemoveContainer" containerID="5c4c80ce18489e1afdd757bbada130f5afc77f20232af565446644cfcfe4e564" Dec 13 04:28:22 crc kubenswrapper[5070]: I1213 04:28:22.575373 5070 scope.go:117] "RemoveContainer" containerID="48ff8357a2b3ee4e83f57e2cb975421a99bd1b2855b387a2135e4094c2c1ae0e" Dec 13 04:28:22 crc kubenswrapper[5070]: E1213 04:28:22.576073 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 04:28:36 crc kubenswrapper[5070]: I1213 04:28:36.167784 5070 scope.go:117] "RemoveContainer" containerID="48ff8357a2b3ee4e83f57e2cb975421a99bd1b2855b387a2135e4094c2c1ae0e" Dec 13 04:28:36 crc kubenswrapper[5070]: E1213 04:28:36.168814 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 04:28:49 crc kubenswrapper[5070]: I1213 04:28:49.167151 5070 scope.go:117] "RemoveContainer" containerID="48ff8357a2b3ee4e83f57e2cb975421a99bd1b2855b387a2135e4094c2c1ae0e" Dec 13 04:28:49 crc kubenswrapper[5070]: E1213 04:28:49.167889 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 04:29:00 crc kubenswrapper[5070]: I1213 04:29:00.167619 5070 scope.go:117] "RemoveContainer" containerID="48ff8357a2b3ee4e83f57e2cb975421a99bd1b2855b387a2135e4094c2c1ae0e" Dec 13 04:29:00 crc kubenswrapper[5070]: E1213 04:29:00.168660 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 04:29:14 crc kubenswrapper[5070]: I1213 04:29:14.167929 5070 scope.go:117] "RemoveContainer" containerID="48ff8357a2b3ee4e83f57e2cb975421a99bd1b2855b387a2135e4094c2c1ae0e" Dec 13 04:29:14 crc kubenswrapper[5070]: E1213 04:29:14.169194 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 04:29:25 crc kubenswrapper[5070]: I1213 04:29:25.168321 5070 scope.go:117] "RemoveContainer" containerID="48ff8357a2b3ee4e83f57e2cb975421a99bd1b2855b387a2135e4094c2c1ae0e" Dec 13 04:29:25 crc kubenswrapper[5070]: E1213 04:29:25.169560 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 04:29:39 crc kubenswrapper[5070]: I1213 04:29:39.168208 5070 scope.go:117] "RemoveContainer" containerID="48ff8357a2b3ee4e83f57e2cb975421a99bd1b2855b387a2135e4094c2c1ae0e" Dec 13 04:29:39 crc kubenswrapper[5070]: E1213 04:29:39.169341 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 04:29:49 crc kubenswrapper[5070]: I1213 04:29:49.973299 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-5xxkq"] Dec 13 04:29:49 crc kubenswrapper[5070]: E1213 04:29:49.974341 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="808bdec5-64c8-40c7-800b-b5089ecf5601" containerName="registry-server" Dec 13 04:29:49 crc kubenswrapper[5070]: I1213 04:29:49.974357 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="808bdec5-64c8-40c7-800b-b5089ecf5601" containerName="registry-server" Dec 13 04:29:49 crc kubenswrapper[5070]: E1213 04:29:49.974373 5070 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="808bdec5-64c8-40c7-800b-b5089ecf5601" containerName="extract-content" Dec 13 04:29:49 crc kubenswrapper[5070]: I1213 04:29:49.974381 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="808bdec5-64c8-40c7-800b-b5089ecf5601" containerName="extract-content" Dec 13 04:29:49 crc kubenswrapper[5070]: E1213 04:29:49.974407 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bfd0656f-58fb-4ee8-a3b2-cc3e19296581" containerName="extract-content" Dec 13 04:29:49 crc kubenswrapper[5070]: I1213 04:29:49.974416 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="bfd0656f-58fb-4ee8-a3b2-cc3e19296581" containerName="extract-content" Dec 13 04:29:49 crc kubenswrapper[5070]: E1213 04:29:49.974455 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bfd0656f-58fb-4ee8-a3b2-cc3e19296581" containerName="extract-utilities" Dec 13 04:29:49 crc kubenswrapper[5070]: I1213 04:29:49.974464 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="bfd0656f-58fb-4ee8-a3b2-cc3e19296581" containerName="extract-utilities" Dec 13 04:29:49 crc kubenswrapper[5070]: E1213 04:29:49.974482 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="808bdec5-64c8-40c7-800b-b5089ecf5601" containerName="extract-utilities" Dec 13 04:29:49 crc kubenswrapper[5070]: I1213 04:29:49.974489 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="808bdec5-64c8-40c7-800b-b5089ecf5601" containerName="extract-utilities" Dec 13 04:29:49 crc kubenswrapper[5070]: E1213 04:29:49.974514 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bfd0656f-58fb-4ee8-a3b2-cc3e19296581" containerName="registry-server" Dec 13 04:29:49 crc kubenswrapper[5070]: I1213 04:29:49.974521 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="bfd0656f-58fb-4ee8-a3b2-cc3e19296581" containerName="registry-server" Dec 13 04:29:49 crc kubenswrapper[5070]: I1213 04:29:49.974743 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="808bdec5-64c8-40c7-800b-b5089ecf5601" containerName="registry-server" Dec 13 04:29:49 crc kubenswrapper[5070]: I1213 04:29:49.974776 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="bfd0656f-58fb-4ee8-a3b2-cc3e19296581" containerName="registry-server" Dec 13 04:29:49 crc kubenswrapper[5070]: I1213 04:29:49.976465 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-5xxkq" Dec 13 04:29:49 crc kubenswrapper[5070]: I1213 04:29:49.992339 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-5xxkq"] Dec 13 04:29:50 crc kubenswrapper[5070]: I1213 04:29:50.125750 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a94e56f1-18c2-4640-a495-49b747d61d97-utilities\") pod \"redhat-operators-5xxkq\" (UID: \"a94e56f1-18c2-4640-a495-49b747d61d97\") " pod="openshift-marketplace/redhat-operators-5xxkq" Dec 13 04:29:50 crc kubenswrapper[5070]: I1213 04:29:50.126297 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a94e56f1-18c2-4640-a495-49b747d61d97-catalog-content\") pod \"redhat-operators-5xxkq\" (UID: \"a94e56f1-18c2-4640-a495-49b747d61d97\") " pod="openshift-marketplace/redhat-operators-5xxkq" Dec 13 04:29:50 crc kubenswrapper[5070]: I1213 04:29:50.126326 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q494x\" (UniqueName: \"kubernetes.io/projected/a94e56f1-18c2-4640-a495-49b747d61d97-kube-api-access-q494x\") pod \"redhat-operators-5xxkq\" (UID: \"a94e56f1-18c2-4640-a495-49b747d61d97\") " pod="openshift-marketplace/redhat-operators-5xxkq" Dec 13 04:29:50 crc kubenswrapper[5070]: I1213 04:29:50.227977 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a94e56f1-18c2-4640-a495-49b747d61d97-catalog-content\") pod \"redhat-operators-5xxkq\" (UID: \"a94e56f1-18c2-4640-a495-49b747d61d97\") " pod="openshift-marketplace/redhat-operators-5xxkq" Dec 13 04:29:50 crc kubenswrapper[5070]: I1213 04:29:50.228036 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q494x\" (UniqueName: \"kubernetes.io/projected/a94e56f1-18c2-4640-a495-49b747d61d97-kube-api-access-q494x\") pod \"redhat-operators-5xxkq\" (UID: \"a94e56f1-18c2-4640-a495-49b747d61d97\") " pod="openshift-marketplace/redhat-operators-5xxkq" Dec 13 04:29:50 crc kubenswrapper[5070]: I1213 04:29:50.228100 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a94e56f1-18c2-4640-a495-49b747d61d97-utilities\") pod \"redhat-operators-5xxkq\" (UID: \"a94e56f1-18c2-4640-a495-49b747d61d97\") " pod="openshift-marketplace/redhat-operators-5xxkq" Dec 13 04:29:50 crc kubenswrapper[5070]: I1213 04:29:50.228851 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a94e56f1-18c2-4640-a495-49b747d61d97-catalog-content\") pod \"redhat-operators-5xxkq\" (UID: \"a94e56f1-18c2-4640-a495-49b747d61d97\") " pod="openshift-marketplace/redhat-operators-5xxkq" Dec 13 04:29:50 crc kubenswrapper[5070]: I1213 04:29:50.228882 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a94e56f1-18c2-4640-a495-49b747d61d97-utilities\") pod \"redhat-operators-5xxkq\" (UID: \"a94e56f1-18c2-4640-a495-49b747d61d97\") " pod="openshift-marketplace/redhat-operators-5xxkq" Dec 13 04:29:50 crc kubenswrapper[5070]: I1213 04:29:50.247252 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-q494x\" (UniqueName: \"kubernetes.io/projected/a94e56f1-18c2-4640-a495-49b747d61d97-kube-api-access-q494x\") pod \"redhat-operators-5xxkq\" (UID: \"a94e56f1-18c2-4640-a495-49b747d61d97\") " pod="openshift-marketplace/redhat-operators-5xxkq" Dec 13 04:29:50 crc kubenswrapper[5070]: I1213 04:29:50.313071 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-5xxkq" Dec 13 04:29:50 crc kubenswrapper[5070]: I1213 04:29:50.779639 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-5xxkq"] Dec 13 04:29:51 crc kubenswrapper[5070]: I1213 04:29:51.555372 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5xxkq" event={"ID":"a94e56f1-18c2-4640-a495-49b747d61d97","Type":"ContainerStarted","Data":"0de64e6ac7ecc5b6d45eb090390149d72c52618c1efb4ba195941677bb64f527"} Dec 13 04:29:51 crc kubenswrapper[5070]: I1213 04:29:51.555690 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5xxkq" event={"ID":"a94e56f1-18c2-4640-a495-49b747d61d97","Type":"ContainerStarted","Data":"c0443ec752c10a29882c06cea1fa17836374255851035f96ab7d259cd5ca9291"} Dec 13 04:29:52 crc kubenswrapper[5070]: I1213 04:29:52.169199 5070 scope.go:117] "RemoveContainer" containerID="48ff8357a2b3ee4e83f57e2cb975421a99bd1b2855b387a2135e4094c2c1ae0e" Dec 13 04:29:52 crc kubenswrapper[5070]: E1213 04:29:52.169757 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 04:29:52 crc kubenswrapper[5070]: I1213 04:29:52.571561 5070 generic.go:334] "Generic (PLEG): container finished" podID="a94e56f1-18c2-4640-a495-49b747d61d97" containerID="0de64e6ac7ecc5b6d45eb090390149d72c52618c1efb4ba195941677bb64f527" exitCode=0 Dec 13 04:29:52 crc kubenswrapper[5070]: I1213 04:29:52.571631 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5xxkq" event={"ID":"a94e56f1-18c2-4640-a495-49b747d61d97","Type":"ContainerDied","Data":"0de64e6ac7ecc5b6d45eb090390149d72c52618c1efb4ba195941677bb64f527"} Dec 13 04:29:52 crc kubenswrapper[5070]: I1213 04:29:52.576089 5070 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 13 04:29:53 crc kubenswrapper[5070]: I1213 04:29:53.584385 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5xxkq" event={"ID":"a94e56f1-18c2-4640-a495-49b747d61d97","Type":"ContainerStarted","Data":"8a13a102260122c420ad2d30611090c50da53459baf1e72364154d3e190225bc"} Dec 13 04:29:55 crc kubenswrapper[5070]: I1213 04:29:55.607716 5070 generic.go:334] "Generic (PLEG): container finished" podID="a94e56f1-18c2-4640-a495-49b747d61d97" containerID="8a13a102260122c420ad2d30611090c50da53459baf1e72364154d3e190225bc" exitCode=0 Dec 13 04:29:55 crc kubenswrapper[5070]: I1213 04:29:55.607770 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5xxkq" 
event={"ID":"a94e56f1-18c2-4640-a495-49b747d61d97","Type":"ContainerDied","Data":"8a13a102260122c420ad2d30611090c50da53459baf1e72364154d3e190225bc"} Dec 13 04:29:56 crc kubenswrapper[5070]: I1213 04:29:56.623659 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5xxkq" event={"ID":"a94e56f1-18c2-4640-a495-49b747d61d97","Type":"ContainerStarted","Data":"664c0c6144cce71fc33b947c59e1fbc1b70158d9311c5637e6b28b0652540aff"} Dec 13 04:29:56 crc kubenswrapper[5070]: I1213 04:29:56.655003 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-5xxkq" podStartSLOduration=4.13369732 podStartE2EDuration="7.654981169s" podCreationTimestamp="2025-12-13 04:29:49 +0000 UTC" firstStartedPulling="2025-12-13 04:29:52.575692684 +0000 UTC m=+4684.811536270" lastFinishedPulling="2025-12-13 04:29:56.096976563 +0000 UTC m=+4688.332820119" observedRunningTime="2025-12-13 04:29:56.642822509 +0000 UTC m=+4688.878666065" watchObservedRunningTime="2025-12-13 04:29:56.654981169 +0000 UTC m=+4688.890824725" Dec 13 04:30:00 crc kubenswrapper[5070]: I1213 04:30:00.148026 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29426670-lc2jg"] Dec 13 04:30:00 crc kubenswrapper[5070]: I1213 04:30:00.149588 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29426670-lc2jg" Dec 13 04:30:00 crc kubenswrapper[5070]: I1213 04:30:00.152516 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 13 04:30:00 crc kubenswrapper[5070]: I1213 04:30:00.152543 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 13 04:30:00 crc kubenswrapper[5070]: I1213 04:30:00.158432 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29426670-lc2jg"] Dec 13 04:30:00 crc kubenswrapper[5070]: I1213 04:30:00.234686 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/aee76c5f-15af-4809-b1f6-b7c28417d9c6-config-volume\") pod \"collect-profiles-29426670-lc2jg\" (UID: \"aee76c5f-15af-4809-b1f6-b7c28417d9c6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426670-lc2jg" Dec 13 04:30:00 crc kubenswrapper[5070]: I1213 04:30:00.234744 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cfgcm\" (UniqueName: \"kubernetes.io/projected/aee76c5f-15af-4809-b1f6-b7c28417d9c6-kube-api-access-cfgcm\") pod \"collect-profiles-29426670-lc2jg\" (UID: \"aee76c5f-15af-4809-b1f6-b7c28417d9c6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426670-lc2jg" Dec 13 04:30:00 crc kubenswrapper[5070]: I1213 04:30:00.234896 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/aee76c5f-15af-4809-b1f6-b7c28417d9c6-secret-volume\") pod \"collect-profiles-29426670-lc2jg\" (UID: \"aee76c5f-15af-4809-b1f6-b7c28417d9c6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426670-lc2jg" Dec 13 04:30:00 crc kubenswrapper[5070]: I1213 04:30:00.313865 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" 
status="unhealthy" pod="openshift-marketplace/redhat-operators-5xxkq" Dec 13 04:30:00 crc kubenswrapper[5070]: I1213 04:30:00.313916 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-5xxkq" Dec 13 04:30:00 crc kubenswrapper[5070]: I1213 04:30:00.337493 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/aee76c5f-15af-4809-b1f6-b7c28417d9c6-config-volume\") pod \"collect-profiles-29426670-lc2jg\" (UID: \"aee76c5f-15af-4809-b1f6-b7c28417d9c6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426670-lc2jg" Dec 13 04:30:00 crc kubenswrapper[5070]: I1213 04:30:00.337606 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cfgcm\" (UniqueName: \"kubernetes.io/projected/aee76c5f-15af-4809-b1f6-b7c28417d9c6-kube-api-access-cfgcm\") pod \"collect-profiles-29426670-lc2jg\" (UID: \"aee76c5f-15af-4809-b1f6-b7c28417d9c6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426670-lc2jg" Dec 13 04:30:00 crc kubenswrapper[5070]: I1213 04:30:00.337705 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/aee76c5f-15af-4809-b1f6-b7c28417d9c6-secret-volume\") pod \"collect-profiles-29426670-lc2jg\" (UID: \"aee76c5f-15af-4809-b1f6-b7c28417d9c6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426670-lc2jg" Dec 13 04:30:00 crc kubenswrapper[5070]: I1213 04:30:00.339207 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/aee76c5f-15af-4809-b1f6-b7c28417d9c6-config-volume\") pod \"collect-profiles-29426670-lc2jg\" (UID: \"aee76c5f-15af-4809-b1f6-b7c28417d9c6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426670-lc2jg" Dec 13 04:30:00 crc kubenswrapper[5070]: I1213 04:30:00.343459 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/aee76c5f-15af-4809-b1f6-b7c28417d9c6-secret-volume\") pod \"collect-profiles-29426670-lc2jg\" (UID: \"aee76c5f-15af-4809-b1f6-b7c28417d9c6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426670-lc2jg" Dec 13 04:30:00 crc kubenswrapper[5070]: I1213 04:30:00.369339 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cfgcm\" (UniqueName: \"kubernetes.io/projected/aee76c5f-15af-4809-b1f6-b7c28417d9c6-kube-api-access-cfgcm\") pod \"collect-profiles-29426670-lc2jg\" (UID: \"aee76c5f-15af-4809-b1f6-b7c28417d9c6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426670-lc2jg" Dec 13 04:30:00 crc kubenswrapper[5070]: I1213 04:30:00.476565 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29426670-lc2jg" Dec 13 04:30:01 crc kubenswrapper[5070]: I1213 04:30:01.371246 5070 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-5xxkq" podUID="a94e56f1-18c2-4640-a495-49b747d61d97" containerName="registry-server" probeResult="failure" output=< Dec 13 04:30:01 crc kubenswrapper[5070]: timeout: failed to connect service ":50051" within 1s Dec 13 04:30:01 crc kubenswrapper[5070]: > Dec 13 04:30:01 crc kubenswrapper[5070]: I1213 04:30:01.459300 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29426670-lc2jg"] Dec 13 04:30:01 crc kubenswrapper[5070]: I1213 04:30:01.666458 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29426670-lc2jg" event={"ID":"aee76c5f-15af-4809-b1f6-b7c28417d9c6","Type":"ContainerStarted","Data":"f892f72aeda6837a4be30fa0274bee7ce725b562b11b0c8cf1db11e1ae5d043f"} Dec 13 04:30:02 crc kubenswrapper[5070]: I1213 04:30:02.680525 5070 generic.go:334] "Generic (PLEG): container finished" podID="aee76c5f-15af-4809-b1f6-b7c28417d9c6" containerID="f10fef9cee44d17414f7665373c78248642b1696fa6ad4e444078dddb5b3ece3" exitCode=0 Dec 13 04:30:02 crc kubenswrapper[5070]: I1213 04:30:02.680732 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29426670-lc2jg" event={"ID":"aee76c5f-15af-4809-b1f6-b7c28417d9c6","Type":"ContainerDied","Data":"f10fef9cee44d17414f7665373c78248642b1696fa6ad4e444078dddb5b3ece3"} Dec 13 04:30:03 crc kubenswrapper[5070]: I1213 04:30:03.166748 5070 scope.go:117] "RemoveContainer" containerID="48ff8357a2b3ee4e83f57e2cb975421a99bd1b2855b387a2135e4094c2c1ae0e" Dec 13 04:30:03 crc kubenswrapper[5070]: E1213 04:30:03.167228 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 04:30:04 crc kubenswrapper[5070]: I1213 04:30:04.245013 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29426670-lc2jg" Dec 13 04:30:04 crc kubenswrapper[5070]: I1213 04:30:04.344496 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/aee76c5f-15af-4809-b1f6-b7c28417d9c6-secret-volume\") pod \"aee76c5f-15af-4809-b1f6-b7c28417d9c6\" (UID: \"aee76c5f-15af-4809-b1f6-b7c28417d9c6\") " Dec 13 04:30:04 crc kubenswrapper[5070]: I1213 04:30:04.344566 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/aee76c5f-15af-4809-b1f6-b7c28417d9c6-config-volume\") pod \"aee76c5f-15af-4809-b1f6-b7c28417d9c6\" (UID: \"aee76c5f-15af-4809-b1f6-b7c28417d9c6\") " Dec 13 04:30:04 crc kubenswrapper[5070]: I1213 04:30:04.344606 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfgcm\" (UniqueName: \"kubernetes.io/projected/aee76c5f-15af-4809-b1f6-b7c28417d9c6-kube-api-access-cfgcm\") pod \"aee76c5f-15af-4809-b1f6-b7c28417d9c6\" (UID: \"aee76c5f-15af-4809-b1f6-b7c28417d9c6\") " Dec 13 04:30:04 crc kubenswrapper[5070]: I1213 04:30:04.345229 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/aee76c5f-15af-4809-b1f6-b7c28417d9c6-config-volume" (OuterVolumeSpecName: "config-volume") pod "aee76c5f-15af-4809-b1f6-b7c28417d9c6" (UID: "aee76c5f-15af-4809-b1f6-b7c28417d9c6"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 04:30:04 crc kubenswrapper[5070]: I1213 04:30:04.350887 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aee76c5f-15af-4809-b1f6-b7c28417d9c6-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "aee76c5f-15af-4809-b1f6-b7c28417d9c6" (UID: "aee76c5f-15af-4809-b1f6-b7c28417d9c6"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 04:30:04 crc kubenswrapper[5070]: I1213 04:30:04.354631 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aee76c5f-15af-4809-b1f6-b7c28417d9c6-kube-api-access-cfgcm" (OuterVolumeSpecName: "kube-api-access-cfgcm") pod "aee76c5f-15af-4809-b1f6-b7c28417d9c6" (UID: "aee76c5f-15af-4809-b1f6-b7c28417d9c6"). InnerVolumeSpecName "kube-api-access-cfgcm". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 04:30:04 crc kubenswrapper[5070]: I1213 04:30:04.446807 5070 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/aee76c5f-15af-4809-b1f6-b7c28417d9c6-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 13 04:30:04 crc kubenswrapper[5070]: I1213 04:30:04.446858 5070 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/aee76c5f-15af-4809-b1f6-b7c28417d9c6-config-volume\") on node \"crc\" DevicePath \"\"" Dec 13 04:30:04 crc kubenswrapper[5070]: I1213 04:30:04.446869 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfgcm\" (UniqueName: \"kubernetes.io/projected/aee76c5f-15af-4809-b1f6-b7c28417d9c6-kube-api-access-cfgcm\") on node \"crc\" DevicePath \"\"" Dec 13 04:30:04 crc kubenswrapper[5070]: I1213 04:30:04.705183 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29426670-lc2jg" event={"ID":"aee76c5f-15af-4809-b1f6-b7c28417d9c6","Type":"ContainerDied","Data":"f892f72aeda6837a4be30fa0274bee7ce725b562b11b0c8cf1db11e1ae5d043f"} Dec 13 04:30:04 crc kubenswrapper[5070]: I1213 04:30:04.705219 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29426670-lc2jg" Dec 13 04:30:04 crc kubenswrapper[5070]: I1213 04:30:04.705229 5070 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f892f72aeda6837a4be30fa0274bee7ce725b562b11b0c8cf1db11e1ae5d043f" Dec 13 04:30:05 crc kubenswrapper[5070]: I1213 04:30:05.351130 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29426625-7vnfh"] Dec 13 04:30:05 crc kubenswrapper[5070]: I1213 04:30:05.361208 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29426625-7vnfh"] Dec 13 04:30:06 crc kubenswrapper[5070]: I1213 04:30:06.178768 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="abeebb11-0cb3-4bd7-8817-7a6495a2fe47" path="/var/lib/kubelet/pods/abeebb11-0cb3-4bd7-8817-7a6495a2fe47/volumes" Dec 13 04:30:10 crc kubenswrapper[5070]: I1213 04:30:10.392202 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-5xxkq" Dec 13 04:30:10 crc kubenswrapper[5070]: I1213 04:30:10.451087 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-5xxkq" Dec 13 04:30:10 crc kubenswrapper[5070]: I1213 04:30:10.651406 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-5xxkq"] Dec 13 04:30:11 crc kubenswrapper[5070]: I1213 04:30:11.787909 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-5xxkq" podUID="a94e56f1-18c2-4640-a495-49b747d61d97" containerName="registry-server" containerID="cri-o://664c0c6144cce71fc33b947c59e1fbc1b70158d9311c5637e6b28b0652540aff" gracePeriod=2 Dec 13 04:30:12 crc kubenswrapper[5070]: I1213 04:30:12.278353 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-5xxkq" Dec 13 04:30:12 crc kubenswrapper[5070]: I1213 04:30:12.426319 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q494x\" (UniqueName: \"kubernetes.io/projected/a94e56f1-18c2-4640-a495-49b747d61d97-kube-api-access-q494x\") pod \"a94e56f1-18c2-4640-a495-49b747d61d97\" (UID: \"a94e56f1-18c2-4640-a495-49b747d61d97\") " Dec 13 04:30:12 crc kubenswrapper[5070]: I1213 04:30:12.426568 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a94e56f1-18c2-4640-a495-49b747d61d97-catalog-content\") pod \"a94e56f1-18c2-4640-a495-49b747d61d97\" (UID: \"a94e56f1-18c2-4640-a495-49b747d61d97\") " Dec 13 04:30:12 crc kubenswrapper[5070]: I1213 04:30:12.426728 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a94e56f1-18c2-4640-a495-49b747d61d97-utilities\") pod \"a94e56f1-18c2-4640-a495-49b747d61d97\" (UID: \"a94e56f1-18c2-4640-a495-49b747d61d97\") " Dec 13 04:30:12 crc kubenswrapper[5070]: I1213 04:30:12.428054 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a94e56f1-18c2-4640-a495-49b747d61d97-utilities" (OuterVolumeSpecName: "utilities") pod "a94e56f1-18c2-4640-a495-49b747d61d97" (UID: "a94e56f1-18c2-4640-a495-49b747d61d97"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 04:30:12 crc kubenswrapper[5070]: I1213 04:30:12.440949 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a94e56f1-18c2-4640-a495-49b747d61d97-kube-api-access-q494x" (OuterVolumeSpecName: "kube-api-access-q494x") pod "a94e56f1-18c2-4640-a495-49b747d61d97" (UID: "a94e56f1-18c2-4640-a495-49b747d61d97"). InnerVolumeSpecName "kube-api-access-q494x". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 04:30:12 crc kubenswrapper[5070]: I1213 04:30:12.529696 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q494x\" (UniqueName: \"kubernetes.io/projected/a94e56f1-18c2-4640-a495-49b747d61d97-kube-api-access-q494x\") on node \"crc\" DevicePath \"\"" Dec 13 04:30:12 crc kubenswrapper[5070]: I1213 04:30:12.529758 5070 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a94e56f1-18c2-4640-a495-49b747d61d97-utilities\") on node \"crc\" DevicePath \"\"" Dec 13 04:30:12 crc kubenswrapper[5070]: I1213 04:30:12.590247 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a94e56f1-18c2-4640-a495-49b747d61d97-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a94e56f1-18c2-4640-a495-49b747d61d97" (UID: "a94e56f1-18c2-4640-a495-49b747d61d97"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 04:30:12 crc kubenswrapper[5070]: I1213 04:30:12.636613 5070 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a94e56f1-18c2-4640-a495-49b747d61d97-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 13 04:30:12 crc kubenswrapper[5070]: I1213 04:30:12.798643 5070 generic.go:334] "Generic (PLEG): container finished" podID="a94e56f1-18c2-4640-a495-49b747d61d97" containerID="664c0c6144cce71fc33b947c59e1fbc1b70158d9311c5637e6b28b0652540aff" exitCode=0 Dec 13 04:30:12 crc kubenswrapper[5070]: I1213 04:30:12.798695 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-5xxkq" Dec 13 04:30:12 crc kubenswrapper[5070]: I1213 04:30:12.798712 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5xxkq" event={"ID":"a94e56f1-18c2-4640-a495-49b747d61d97","Type":"ContainerDied","Data":"664c0c6144cce71fc33b947c59e1fbc1b70158d9311c5637e6b28b0652540aff"} Dec 13 04:30:12 crc kubenswrapper[5070]: I1213 04:30:12.800109 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5xxkq" event={"ID":"a94e56f1-18c2-4640-a495-49b747d61d97","Type":"ContainerDied","Data":"c0443ec752c10a29882c06cea1fa17836374255851035f96ab7d259cd5ca9291"} Dec 13 04:30:12 crc kubenswrapper[5070]: I1213 04:30:12.800131 5070 scope.go:117] "RemoveContainer" containerID="664c0c6144cce71fc33b947c59e1fbc1b70158d9311c5637e6b28b0652540aff" Dec 13 04:30:12 crc kubenswrapper[5070]: I1213 04:30:12.841471 5070 scope.go:117] "RemoveContainer" containerID="8a13a102260122c420ad2d30611090c50da53459baf1e72364154d3e190225bc" Dec 13 04:30:12 crc kubenswrapper[5070]: I1213 04:30:12.852697 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-5xxkq"] Dec 13 04:30:12 crc kubenswrapper[5070]: I1213 04:30:12.864094 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-5xxkq"] Dec 13 04:30:12 crc kubenswrapper[5070]: I1213 04:30:12.874669 5070 scope.go:117] "RemoveContainer" containerID="0de64e6ac7ecc5b6d45eb090390149d72c52618c1efb4ba195941677bb64f527" Dec 13 04:30:12 crc kubenswrapper[5070]: I1213 04:30:12.936298 5070 scope.go:117] "RemoveContainer" containerID="664c0c6144cce71fc33b947c59e1fbc1b70158d9311c5637e6b28b0652540aff" Dec 13 04:30:12 crc kubenswrapper[5070]: E1213 04:30:12.936857 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"664c0c6144cce71fc33b947c59e1fbc1b70158d9311c5637e6b28b0652540aff\": container with ID starting with 664c0c6144cce71fc33b947c59e1fbc1b70158d9311c5637e6b28b0652540aff not found: ID does not exist" containerID="664c0c6144cce71fc33b947c59e1fbc1b70158d9311c5637e6b28b0652540aff" Dec 13 04:30:12 crc kubenswrapper[5070]: I1213 04:30:12.936947 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"664c0c6144cce71fc33b947c59e1fbc1b70158d9311c5637e6b28b0652540aff"} err="failed to get container status \"664c0c6144cce71fc33b947c59e1fbc1b70158d9311c5637e6b28b0652540aff\": rpc error: code = NotFound desc = could not find container \"664c0c6144cce71fc33b947c59e1fbc1b70158d9311c5637e6b28b0652540aff\": container with ID starting with 664c0c6144cce71fc33b947c59e1fbc1b70158d9311c5637e6b28b0652540aff not found: ID does not exist" Dec 13 04:30:12 crc 
kubenswrapper[5070]: I1213 04:30:12.936998 5070 scope.go:117] "RemoveContainer" containerID="8a13a102260122c420ad2d30611090c50da53459baf1e72364154d3e190225bc" Dec 13 04:30:12 crc kubenswrapper[5070]: E1213 04:30:12.937599 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8a13a102260122c420ad2d30611090c50da53459baf1e72364154d3e190225bc\": container with ID starting with 8a13a102260122c420ad2d30611090c50da53459baf1e72364154d3e190225bc not found: ID does not exist" containerID="8a13a102260122c420ad2d30611090c50da53459baf1e72364154d3e190225bc" Dec 13 04:30:12 crc kubenswrapper[5070]: I1213 04:30:12.937643 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8a13a102260122c420ad2d30611090c50da53459baf1e72364154d3e190225bc"} err="failed to get container status \"8a13a102260122c420ad2d30611090c50da53459baf1e72364154d3e190225bc\": rpc error: code = NotFound desc = could not find container \"8a13a102260122c420ad2d30611090c50da53459baf1e72364154d3e190225bc\": container with ID starting with 8a13a102260122c420ad2d30611090c50da53459baf1e72364154d3e190225bc not found: ID does not exist" Dec 13 04:30:12 crc kubenswrapper[5070]: I1213 04:30:12.937670 5070 scope.go:117] "RemoveContainer" containerID="0de64e6ac7ecc5b6d45eb090390149d72c52618c1efb4ba195941677bb64f527" Dec 13 04:30:12 crc kubenswrapper[5070]: E1213 04:30:12.938024 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0de64e6ac7ecc5b6d45eb090390149d72c52618c1efb4ba195941677bb64f527\": container with ID starting with 0de64e6ac7ecc5b6d45eb090390149d72c52618c1efb4ba195941677bb64f527 not found: ID does not exist" containerID="0de64e6ac7ecc5b6d45eb090390149d72c52618c1efb4ba195941677bb64f527" Dec 13 04:30:12 crc kubenswrapper[5070]: I1213 04:30:12.938068 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0de64e6ac7ecc5b6d45eb090390149d72c52618c1efb4ba195941677bb64f527"} err="failed to get container status \"0de64e6ac7ecc5b6d45eb090390149d72c52618c1efb4ba195941677bb64f527\": rpc error: code = NotFound desc = could not find container \"0de64e6ac7ecc5b6d45eb090390149d72c52618c1efb4ba195941677bb64f527\": container with ID starting with 0de64e6ac7ecc5b6d45eb090390149d72c52618c1efb4ba195941677bb64f527 not found: ID does not exist" Dec 13 04:30:14 crc kubenswrapper[5070]: I1213 04:30:14.192083 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a94e56f1-18c2-4640-a495-49b747d61d97" path="/var/lib/kubelet/pods/a94e56f1-18c2-4640-a495-49b747d61d97/volumes" Dec 13 04:30:17 crc kubenswrapper[5070]: I1213 04:30:17.168053 5070 scope.go:117] "RemoveContainer" containerID="48ff8357a2b3ee4e83f57e2cb975421a99bd1b2855b387a2135e4094c2c1ae0e" Dec 13 04:30:17 crc kubenswrapper[5070]: E1213 04:30:17.169263 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 04:30:30 crc kubenswrapper[5070]: I1213 04:30:30.167010 5070 scope.go:117] "RemoveContainer" containerID="48ff8357a2b3ee4e83f57e2cb975421a99bd1b2855b387a2135e4094c2c1ae0e" 
Dec 13 04:30:30 crc kubenswrapper[5070]: E1213 04:30:30.167863 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 04:30:32 crc kubenswrapper[5070]: I1213 04:30:32.707526 5070 scope.go:117] "RemoveContainer" containerID="6d84daa83a308bbbb87c67ab26ee4b5ed48d91a2e01d2aaf213120d7f2066e9a" Dec 13 04:30:42 crc kubenswrapper[5070]: I1213 04:30:42.166855 5070 scope.go:117] "RemoveContainer" containerID="48ff8357a2b3ee4e83f57e2cb975421a99bd1b2855b387a2135e4094c2c1ae0e" Dec 13 04:30:42 crc kubenswrapper[5070]: E1213 04:30:42.167413 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 04:30:55 crc kubenswrapper[5070]: I1213 04:30:55.167788 5070 scope.go:117] "RemoveContainer" containerID="48ff8357a2b3ee4e83f57e2cb975421a99bd1b2855b387a2135e4094c2c1ae0e" Dec 13 04:30:55 crc kubenswrapper[5070]: E1213 04:30:55.169163 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 04:31:07 crc kubenswrapper[5070]: I1213 04:31:07.167324 5070 scope.go:117] "RemoveContainer" containerID="48ff8357a2b3ee4e83f57e2cb975421a99bd1b2855b387a2135e4094c2c1ae0e" Dec 13 04:31:07 crc kubenswrapper[5070]: E1213 04:31:07.167917 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 04:31:22 crc kubenswrapper[5070]: I1213 04:31:22.167740 5070 scope.go:117] "RemoveContainer" containerID="48ff8357a2b3ee4e83f57e2cb975421a99bd1b2855b387a2135e4094c2c1ae0e" Dec 13 04:31:22 crc kubenswrapper[5070]: E1213 04:31:22.168493 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 04:31:35 crc kubenswrapper[5070]: I1213 04:31:35.167418 5070 scope.go:117] "RemoveContainer" containerID="48ff8357a2b3ee4e83f57e2cb975421a99bd1b2855b387a2135e4094c2c1ae0e" Dec 13 04:31:35 
crc kubenswrapper[5070]: E1213 04:31:35.168572 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 04:31:49 crc kubenswrapper[5070]: I1213 04:31:49.167588 5070 scope.go:117] "RemoveContainer" containerID="48ff8357a2b3ee4e83f57e2cb975421a99bd1b2855b387a2135e4094c2c1ae0e" Dec 13 04:31:49 crc kubenswrapper[5070]: E1213 04:31:49.168777 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 04:32:03 crc kubenswrapper[5070]: I1213 04:32:03.167245 5070 scope.go:117] "RemoveContainer" containerID="48ff8357a2b3ee4e83f57e2cb975421a99bd1b2855b387a2135e4094c2c1ae0e" Dec 13 04:32:03 crc kubenswrapper[5070]: E1213 04:32:03.168222 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 04:32:16 crc kubenswrapper[5070]: I1213 04:32:16.166993 5070 scope.go:117] "RemoveContainer" containerID="48ff8357a2b3ee4e83f57e2cb975421a99bd1b2855b387a2135e4094c2c1ae0e" Dec 13 04:32:16 crc kubenswrapper[5070]: E1213 04:32:16.167895 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 04:32:31 crc kubenswrapper[5070]: I1213 04:32:31.167366 5070 scope.go:117] "RemoveContainer" containerID="48ff8357a2b3ee4e83f57e2cb975421a99bd1b2855b387a2135e4094c2c1ae0e" Dec 13 04:32:31 crc kubenswrapper[5070]: E1213 04:32:31.168616 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 04:32:32 crc kubenswrapper[5070]: I1213 04:32:32.811031 5070 scope.go:117] "RemoveContainer" containerID="54123e80d91e805f27bdaaf5d8e1095d9852b602db45048a01af5ac3991ca8f0" Dec 13 04:32:43 crc kubenswrapper[5070]: I1213 04:32:43.168134 5070 scope.go:117] "RemoveContainer" containerID="48ff8357a2b3ee4e83f57e2cb975421a99bd1b2855b387a2135e4094c2c1ae0e" Dec 13 04:32:43 crc 
kubenswrapper[5070]: E1213 04:32:43.169506 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 04:32:56 crc kubenswrapper[5070]: I1213 04:32:56.167980 5070 scope.go:117] "RemoveContainer" containerID="48ff8357a2b3ee4e83f57e2cb975421a99bd1b2855b387a2135e4094c2c1ae0e" Dec 13 04:32:56 crc kubenswrapper[5070]: E1213 04:32:56.172431 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 04:33:11 crc kubenswrapper[5070]: I1213 04:33:11.167616 5070 scope.go:117] "RemoveContainer" containerID="48ff8357a2b3ee4e83f57e2cb975421a99bd1b2855b387a2135e4094c2c1ae0e" Dec 13 04:33:11 crc kubenswrapper[5070]: E1213 04:33:11.168407 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 04:33:22 crc kubenswrapper[5070]: I1213 04:33:22.167328 5070 scope.go:117] "RemoveContainer" containerID="48ff8357a2b3ee4e83f57e2cb975421a99bd1b2855b387a2135e4094c2c1ae0e" Dec 13 04:33:22 crc kubenswrapper[5070]: I1213 04:33:22.915732 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" event={"ID":"a2e447c7-5901-414f-af96-69441d4750db","Type":"ContainerStarted","Data":"c34f4a52bb9647b9f7bcdab3043ab312c6ab456ed8b7b789d6017a360b184ec2"} Dec 13 04:33:32 crc kubenswrapper[5070]: I1213 04:33:32.908388 5070 scope.go:117] "RemoveContainer" containerID="7df6a9f621d283db2d9983da7573626a705402b520e4a78e119c1fcecbd0ae47" Dec 13 04:33:32 crc kubenswrapper[5070]: I1213 04:33:32.949602 5070 scope.go:117] "RemoveContainer" containerID="90a07418bf094740353cb7ad3825b04d8612b2182c764a3a1025a0dd673d0f7a" Dec 13 04:34:07 crc kubenswrapper[5070]: I1213 04:34:07.865361 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-mmwc6"] Dec 13 04:34:07 crc kubenswrapper[5070]: E1213 04:34:07.868902 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a94e56f1-18c2-4640-a495-49b747d61d97" containerName="registry-server" Dec 13 04:34:07 crc kubenswrapper[5070]: I1213 04:34:07.868937 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="a94e56f1-18c2-4640-a495-49b747d61d97" containerName="registry-server" Dec 13 04:34:07 crc kubenswrapper[5070]: E1213 04:34:07.868971 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a94e56f1-18c2-4640-a495-49b747d61d97" containerName="extract-content" Dec 13 04:34:07 crc kubenswrapper[5070]: I1213 04:34:07.868984 5070 state_mem.go:107] 
"Deleted CPUSet assignment" podUID="a94e56f1-18c2-4640-a495-49b747d61d97" containerName="extract-content" Dec 13 04:34:07 crc kubenswrapper[5070]: E1213 04:34:07.869003 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aee76c5f-15af-4809-b1f6-b7c28417d9c6" containerName="collect-profiles" Dec 13 04:34:07 crc kubenswrapper[5070]: I1213 04:34:07.869015 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="aee76c5f-15af-4809-b1f6-b7c28417d9c6" containerName="collect-profiles" Dec 13 04:34:07 crc kubenswrapper[5070]: E1213 04:34:07.869043 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a94e56f1-18c2-4640-a495-49b747d61d97" containerName="extract-utilities" Dec 13 04:34:07 crc kubenswrapper[5070]: I1213 04:34:07.869055 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="a94e56f1-18c2-4640-a495-49b747d61d97" containerName="extract-utilities" Dec 13 04:34:07 crc kubenswrapper[5070]: I1213 04:34:07.869434 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="a94e56f1-18c2-4640-a495-49b747d61d97" containerName="registry-server" Dec 13 04:34:07 crc kubenswrapper[5070]: I1213 04:34:07.869497 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="aee76c5f-15af-4809-b1f6-b7c28417d9c6" containerName="collect-profiles" Dec 13 04:34:07 crc kubenswrapper[5070]: I1213 04:34:07.871565 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mmwc6" Dec 13 04:34:07 crc kubenswrapper[5070]: I1213 04:34:07.896177 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-mmwc6"] Dec 13 04:34:07 crc kubenswrapper[5070]: I1213 04:34:07.991105 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3d8c349c-5668-47ae-abde-5e876ef3b846-utilities\") pod \"certified-operators-mmwc6\" (UID: \"3d8c349c-5668-47ae-abde-5e876ef3b846\") " pod="openshift-marketplace/certified-operators-mmwc6" Dec 13 04:34:07 crc kubenswrapper[5070]: I1213 04:34:07.991184 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f7d9k\" (UniqueName: \"kubernetes.io/projected/3d8c349c-5668-47ae-abde-5e876ef3b846-kube-api-access-f7d9k\") pod \"certified-operators-mmwc6\" (UID: \"3d8c349c-5668-47ae-abde-5e876ef3b846\") " pod="openshift-marketplace/certified-operators-mmwc6" Dec 13 04:34:07 crc kubenswrapper[5070]: I1213 04:34:07.991391 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3d8c349c-5668-47ae-abde-5e876ef3b846-catalog-content\") pod \"certified-operators-mmwc6\" (UID: \"3d8c349c-5668-47ae-abde-5e876ef3b846\") " pod="openshift-marketplace/certified-operators-mmwc6" Dec 13 04:34:08 crc kubenswrapper[5070]: I1213 04:34:08.093017 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3d8c349c-5668-47ae-abde-5e876ef3b846-catalog-content\") pod \"certified-operators-mmwc6\" (UID: \"3d8c349c-5668-47ae-abde-5e876ef3b846\") " pod="openshift-marketplace/certified-operators-mmwc6" Dec 13 04:34:08 crc kubenswrapper[5070]: I1213 04:34:08.093209 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/3d8c349c-5668-47ae-abde-5e876ef3b846-utilities\") pod \"certified-operators-mmwc6\" (UID: \"3d8c349c-5668-47ae-abde-5e876ef3b846\") " pod="openshift-marketplace/certified-operators-mmwc6" Dec 13 04:34:08 crc kubenswrapper[5070]: I1213 04:34:08.093238 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f7d9k\" (UniqueName: \"kubernetes.io/projected/3d8c349c-5668-47ae-abde-5e876ef3b846-kube-api-access-f7d9k\") pod \"certified-operators-mmwc6\" (UID: \"3d8c349c-5668-47ae-abde-5e876ef3b846\") " pod="openshift-marketplace/certified-operators-mmwc6" Dec 13 04:34:08 crc kubenswrapper[5070]: I1213 04:34:08.093702 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3d8c349c-5668-47ae-abde-5e876ef3b846-catalog-content\") pod \"certified-operators-mmwc6\" (UID: \"3d8c349c-5668-47ae-abde-5e876ef3b846\") " pod="openshift-marketplace/certified-operators-mmwc6" Dec 13 04:34:08 crc kubenswrapper[5070]: I1213 04:34:08.093863 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3d8c349c-5668-47ae-abde-5e876ef3b846-utilities\") pod \"certified-operators-mmwc6\" (UID: \"3d8c349c-5668-47ae-abde-5e876ef3b846\") " pod="openshift-marketplace/certified-operators-mmwc6" Dec 13 04:34:08 crc kubenswrapper[5070]: I1213 04:34:08.116205 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f7d9k\" (UniqueName: \"kubernetes.io/projected/3d8c349c-5668-47ae-abde-5e876ef3b846-kube-api-access-f7d9k\") pod \"certified-operators-mmwc6\" (UID: \"3d8c349c-5668-47ae-abde-5e876ef3b846\") " pod="openshift-marketplace/certified-operators-mmwc6" Dec 13 04:34:08 crc kubenswrapper[5070]: I1213 04:34:08.209073 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-mmwc6" Dec 13 04:34:08 crc kubenswrapper[5070]: I1213 04:34:08.804341 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-mmwc6"] Dec 13 04:34:09 crc kubenswrapper[5070]: I1213 04:34:09.426586 5070 generic.go:334] "Generic (PLEG): container finished" podID="3d8c349c-5668-47ae-abde-5e876ef3b846" containerID="1fc71ea6b994047e35c79d91aa1f5b3020cc05f24d804d7d73fcf586c4bb4b75" exitCode=0 Dec 13 04:34:09 crc kubenswrapper[5070]: I1213 04:34:09.426694 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mmwc6" event={"ID":"3d8c349c-5668-47ae-abde-5e876ef3b846","Type":"ContainerDied","Data":"1fc71ea6b994047e35c79d91aa1f5b3020cc05f24d804d7d73fcf586c4bb4b75"} Dec 13 04:34:09 crc kubenswrapper[5070]: I1213 04:34:09.426906 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mmwc6" event={"ID":"3d8c349c-5668-47ae-abde-5e876ef3b846","Type":"ContainerStarted","Data":"15e0654baa56b4a55eaccccd8e562bc5a81bbce29ccc77ef3e592ee5bf7e25a7"} Dec 13 04:34:11 crc kubenswrapper[5070]: I1213 04:34:11.455989 5070 generic.go:334] "Generic (PLEG): container finished" podID="3d8c349c-5668-47ae-abde-5e876ef3b846" containerID="969897028ae1676a25fbdd9b09d266b3698f9e93c38e8511e142375daff03b70" exitCode=0 Dec 13 04:34:11 crc kubenswrapper[5070]: I1213 04:34:11.456094 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mmwc6" event={"ID":"3d8c349c-5668-47ae-abde-5e876ef3b846","Type":"ContainerDied","Data":"969897028ae1676a25fbdd9b09d266b3698f9e93c38e8511e142375daff03b70"} Dec 13 04:34:12 crc kubenswrapper[5070]: I1213 04:34:12.467215 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mmwc6" event={"ID":"3d8c349c-5668-47ae-abde-5e876ef3b846","Type":"ContainerStarted","Data":"7a7457d71d1fa678da7ff66e63a1c00176bae52a584bfac641200b1971a66dd3"} Dec 13 04:34:12 crc kubenswrapper[5070]: I1213 04:34:12.488676 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-mmwc6" podStartSLOduration=2.841651661 podStartE2EDuration="5.488652502s" podCreationTimestamp="2025-12-13 04:34:07 +0000 UTC" firstStartedPulling="2025-12-13 04:34:09.428343507 +0000 UTC m=+4941.664187053" lastFinishedPulling="2025-12-13 04:34:12.075344348 +0000 UTC m=+4944.311187894" observedRunningTime="2025-12-13 04:34:12.480311576 +0000 UTC m=+4944.716155122" watchObservedRunningTime="2025-12-13 04:34:12.488652502 +0000 UTC m=+4944.724496088" Dec 13 04:34:18 crc kubenswrapper[5070]: I1213 04:34:18.209655 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-mmwc6" Dec 13 04:34:18 crc kubenswrapper[5070]: I1213 04:34:18.210403 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-mmwc6" Dec 13 04:34:18 crc kubenswrapper[5070]: I1213 04:34:18.262941 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-mmwc6" Dec 13 04:34:18 crc kubenswrapper[5070]: I1213 04:34:18.596612 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-mmwc6" Dec 13 04:34:18 crc kubenswrapper[5070]: I1213 04:34:18.649242 5070 kubelet.go:2437] "SyncLoop DELETE" 
source="api" pods=["openshift-marketplace/certified-operators-mmwc6"] Dec 13 04:34:20 crc kubenswrapper[5070]: I1213 04:34:20.547278 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-mmwc6" podUID="3d8c349c-5668-47ae-abde-5e876ef3b846" containerName="registry-server" containerID="cri-o://7a7457d71d1fa678da7ff66e63a1c00176bae52a584bfac641200b1971a66dd3" gracePeriod=2 Dec 13 04:34:21 crc kubenswrapper[5070]: I1213 04:34:21.113128 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mmwc6" Dec 13 04:34:21 crc kubenswrapper[5070]: I1213 04:34:21.244349 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3d8c349c-5668-47ae-abde-5e876ef3b846-utilities\") pod \"3d8c349c-5668-47ae-abde-5e876ef3b846\" (UID: \"3d8c349c-5668-47ae-abde-5e876ef3b846\") " Dec 13 04:34:21 crc kubenswrapper[5070]: I1213 04:34:21.244597 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f7d9k\" (UniqueName: \"kubernetes.io/projected/3d8c349c-5668-47ae-abde-5e876ef3b846-kube-api-access-f7d9k\") pod \"3d8c349c-5668-47ae-abde-5e876ef3b846\" (UID: \"3d8c349c-5668-47ae-abde-5e876ef3b846\") " Dec 13 04:34:21 crc kubenswrapper[5070]: I1213 04:34:21.244709 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3d8c349c-5668-47ae-abde-5e876ef3b846-catalog-content\") pod \"3d8c349c-5668-47ae-abde-5e876ef3b846\" (UID: \"3d8c349c-5668-47ae-abde-5e876ef3b846\") " Dec 13 04:34:21 crc kubenswrapper[5070]: I1213 04:34:21.246277 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3d8c349c-5668-47ae-abde-5e876ef3b846-utilities" (OuterVolumeSpecName: "utilities") pod "3d8c349c-5668-47ae-abde-5e876ef3b846" (UID: "3d8c349c-5668-47ae-abde-5e876ef3b846"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 04:34:21 crc kubenswrapper[5070]: I1213 04:34:21.254975 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3d8c349c-5668-47ae-abde-5e876ef3b846-kube-api-access-f7d9k" (OuterVolumeSpecName: "kube-api-access-f7d9k") pod "3d8c349c-5668-47ae-abde-5e876ef3b846" (UID: "3d8c349c-5668-47ae-abde-5e876ef3b846"). InnerVolumeSpecName "kube-api-access-f7d9k". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 04:34:21 crc kubenswrapper[5070]: I1213 04:34:21.347566 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f7d9k\" (UniqueName: \"kubernetes.io/projected/3d8c349c-5668-47ae-abde-5e876ef3b846-kube-api-access-f7d9k\") on node \"crc\" DevicePath \"\"" Dec 13 04:34:21 crc kubenswrapper[5070]: I1213 04:34:21.347613 5070 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3d8c349c-5668-47ae-abde-5e876ef3b846-utilities\") on node \"crc\" DevicePath \"\"" Dec 13 04:34:21 crc kubenswrapper[5070]: I1213 04:34:21.565031 5070 generic.go:334] "Generic (PLEG): container finished" podID="3d8c349c-5668-47ae-abde-5e876ef3b846" containerID="7a7457d71d1fa678da7ff66e63a1c00176bae52a584bfac641200b1971a66dd3" exitCode=0 Dec 13 04:34:21 crc kubenswrapper[5070]: I1213 04:34:21.565091 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mmwc6" event={"ID":"3d8c349c-5668-47ae-abde-5e876ef3b846","Type":"ContainerDied","Data":"7a7457d71d1fa678da7ff66e63a1c00176bae52a584bfac641200b1971a66dd3"} Dec 13 04:34:21 crc kubenswrapper[5070]: I1213 04:34:21.565133 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mmwc6" Dec 13 04:34:21 crc kubenswrapper[5070]: I1213 04:34:21.565157 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mmwc6" event={"ID":"3d8c349c-5668-47ae-abde-5e876ef3b846","Type":"ContainerDied","Data":"15e0654baa56b4a55eaccccd8e562bc5a81bbce29ccc77ef3e592ee5bf7e25a7"} Dec 13 04:34:21 crc kubenswrapper[5070]: I1213 04:34:21.565185 5070 scope.go:117] "RemoveContainer" containerID="7a7457d71d1fa678da7ff66e63a1c00176bae52a584bfac641200b1971a66dd3" Dec 13 04:34:21 crc kubenswrapper[5070]: I1213 04:34:21.600186 5070 scope.go:117] "RemoveContainer" containerID="969897028ae1676a25fbdd9b09d266b3698f9e93c38e8511e142375daff03b70" Dec 13 04:34:21 crc kubenswrapper[5070]: I1213 04:34:21.624108 5070 scope.go:117] "RemoveContainer" containerID="1fc71ea6b994047e35c79d91aa1f5b3020cc05f24d804d7d73fcf586c4bb4b75" Dec 13 04:34:21 crc kubenswrapper[5070]: I1213 04:34:21.664949 5070 scope.go:117] "RemoveContainer" containerID="7a7457d71d1fa678da7ff66e63a1c00176bae52a584bfac641200b1971a66dd3" Dec 13 04:34:21 crc kubenswrapper[5070]: E1213 04:34:21.665508 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7a7457d71d1fa678da7ff66e63a1c00176bae52a584bfac641200b1971a66dd3\": container with ID starting with 7a7457d71d1fa678da7ff66e63a1c00176bae52a584bfac641200b1971a66dd3 not found: ID does not exist" containerID="7a7457d71d1fa678da7ff66e63a1c00176bae52a584bfac641200b1971a66dd3" Dec 13 04:34:21 crc kubenswrapper[5070]: I1213 04:34:21.665551 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7a7457d71d1fa678da7ff66e63a1c00176bae52a584bfac641200b1971a66dd3"} err="failed to get container status \"7a7457d71d1fa678da7ff66e63a1c00176bae52a584bfac641200b1971a66dd3\": rpc error: code = NotFound desc = could not find container \"7a7457d71d1fa678da7ff66e63a1c00176bae52a584bfac641200b1971a66dd3\": container with ID starting with 7a7457d71d1fa678da7ff66e63a1c00176bae52a584bfac641200b1971a66dd3 not found: ID does not exist" Dec 13 04:34:21 crc kubenswrapper[5070]: I1213 04:34:21.665577 5070 scope.go:117] 
"RemoveContainer" containerID="969897028ae1676a25fbdd9b09d266b3698f9e93c38e8511e142375daff03b70" Dec 13 04:34:21 crc kubenswrapper[5070]: E1213 04:34:21.666064 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"969897028ae1676a25fbdd9b09d266b3698f9e93c38e8511e142375daff03b70\": container with ID starting with 969897028ae1676a25fbdd9b09d266b3698f9e93c38e8511e142375daff03b70 not found: ID does not exist" containerID="969897028ae1676a25fbdd9b09d266b3698f9e93c38e8511e142375daff03b70" Dec 13 04:34:21 crc kubenswrapper[5070]: I1213 04:34:21.666114 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"969897028ae1676a25fbdd9b09d266b3698f9e93c38e8511e142375daff03b70"} err="failed to get container status \"969897028ae1676a25fbdd9b09d266b3698f9e93c38e8511e142375daff03b70\": rpc error: code = NotFound desc = could not find container \"969897028ae1676a25fbdd9b09d266b3698f9e93c38e8511e142375daff03b70\": container with ID starting with 969897028ae1676a25fbdd9b09d266b3698f9e93c38e8511e142375daff03b70 not found: ID does not exist" Dec 13 04:34:21 crc kubenswrapper[5070]: I1213 04:34:21.666138 5070 scope.go:117] "RemoveContainer" containerID="1fc71ea6b994047e35c79d91aa1f5b3020cc05f24d804d7d73fcf586c4bb4b75" Dec 13 04:34:21 crc kubenswrapper[5070]: E1213 04:34:21.666514 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1fc71ea6b994047e35c79d91aa1f5b3020cc05f24d804d7d73fcf586c4bb4b75\": container with ID starting with 1fc71ea6b994047e35c79d91aa1f5b3020cc05f24d804d7d73fcf586c4bb4b75 not found: ID does not exist" containerID="1fc71ea6b994047e35c79d91aa1f5b3020cc05f24d804d7d73fcf586c4bb4b75" Dec 13 04:34:21 crc kubenswrapper[5070]: I1213 04:34:21.666534 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1fc71ea6b994047e35c79d91aa1f5b3020cc05f24d804d7d73fcf586c4bb4b75"} err="failed to get container status \"1fc71ea6b994047e35c79d91aa1f5b3020cc05f24d804d7d73fcf586c4bb4b75\": rpc error: code = NotFound desc = could not find container \"1fc71ea6b994047e35c79d91aa1f5b3020cc05f24d804d7d73fcf586c4bb4b75\": container with ID starting with 1fc71ea6b994047e35c79d91aa1f5b3020cc05f24d804d7d73fcf586c4bb4b75 not found: ID does not exist" Dec 13 04:34:21 crc kubenswrapper[5070]: I1213 04:34:21.725841 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3d8c349c-5668-47ae-abde-5e876ef3b846-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3d8c349c-5668-47ae-abde-5e876ef3b846" (UID: "3d8c349c-5668-47ae-abde-5e876ef3b846"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 04:34:21 crc kubenswrapper[5070]: I1213 04:34:21.756980 5070 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3d8c349c-5668-47ae-abde-5e876ef3b846-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 13 04:34:21 crc kubenswrapper[5070]: I1213 04:34:21.906323 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-mmwc6"] Dec 13 04:34:21 crc kubenswrapper[5070]: I1213 04:34:21.917006 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-mmwc6"] Dec 13 04:34:22 crc kubenswrapper[5070]: I1213 04:34:22.204884 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3d8c349c-5668-47ae-abde-5e876ef3b846" path="/var/lib/kubelet/pods/3d8c349c-5668-47ae-abde-5e876ef3b846/volumes" Dec 13 04:34:31 crc kubenswrapper[5070]: I1213 04:34:31.660984 5070 generic.go:334] "Generic (PLEG): container finished" podID="7414f018-4f89-4d33-a19a-af5e996ba16b" containerID="0f5c088cb0c5752ab3827eb14e29cb4b8aee7f15fb455b63fa7f235191d6cdf2" exitCode=1 Dec 13 04:34:31 crc kubenswrapper[5070]: I1213 04:34:31.661092 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"7414f018-4f89-4d33-a19a-af5e996ba16b","Type":"ContainerDied","Data":"0f5c088cb0c5752ab3827eb14e29cb4b8aee7f15fb455b63fa7f235191d6cdf2"} Dec 13 04:34:33 crc kubenswrapper[5070]: I1213 04:34:33.148182 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Dec 13 04:34:33 crc kubenswrapper[5070]: I1213 04:34:33.209818 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/7414f018-4f89-4d33-a19a-af5e996ba16b-openstack-config-secret\") pod \"7414f018-4f89-4d33-a19a-af5e996ba16b\" (UID: \"7414f018-4f89-4d33-a19a-af5e996ba16b\") " Dec 13 04:34:33 crc kubenswrapper[5070]: I1213 04:34:33.210180 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/7414f018-4f89-4d33-a19a-af5e996ba16b-test-operator-ephemeral-temporary\") pod \"7414f018-4f89-4d33-a19a-af5e996ba16b\" (UID: \"7414f018-4f89-4d33-a19a-af5e996ba16b\") " Dec 13 04:34:33 crc kubenswrapper[5070]: I1213 04:34:33.210231 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-logs\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"7414f018-4f89-4d33-a19a-af5e996ba16b\" (UID: \"7414f018-4f89-4d33-a19a-af5e996ba16b\") " Dec 13 04:34:33 crc kubenswrapper[5070]: I1213 04:34:33.210305 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/7414f018-4f89-4d33-a19a-af5e996ba16b-openstack-config\") pod \"7414f018-4f89-4d33-a19a-af5e996ba16b\" (UID: \"7414f018-4f89-4d33-a19a-af5e996ba16b\") " Dec 13 04:34:33 crc kubenswrapper[5070]: I1213 04:34:33.210341 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/7414f018-4f89-4d33-a19a-af5e996ba16b-ca-certs\") pod \"7414f018-4f89-4d33-a19a-af5e996ba16b\" (UID: \"7414f018-4f89-4d33-a19a-af5e996ba16b\") " Dec 13 04:34:33 crc kubenswrapper[5070]: I1213 04:34:33.210459 5070 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"kube-api-access-rhq74\" (UniqueName: \"kubernetes.io/projected/7414f018-4f89-4d33-a19a-af5e996ba16b-kube-api-access-rhq74\") pod \"7414f018-4f89-4d33-a19a-af5e996ba16b\" (UID: \"7414f018-4f89-4d33-a19a-af5e996ba16b\") " Dec 13 04:34:33 crc kubenswrapper[5070]: I1213 04:34:33.210581 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7414f018-4f89-4d33-a19a-af5e996ba16b-ssh-key\") pod \"7414f018-4f89-4d33-a19a-af5e996ba16b\" (UID: \"7414f018-4f89-4d33-a19a-af5e996ba16b\") " Dec 13 04:34:33 crc kubenswrapper[5070]: I1213 04:34:33.210650 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7414f018-4f89-4d33-a19a-af5e996ba16b-config-data\") pod \"7414f018-4f89-4d33-a19a-af5e996ba16b\" (UID: \"7414f018-4f89-4d33-a19a-af5e996ba16b\") " Dec 13 04:34:33 crc kubenswrapper[5070]: I1213 04:34:33.210699 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/7414f018-4f89-4d33-a19a-af5e996ba16b-test-operator-ephemeral-workdir\") pod \"7414f018-4f89-4d33-a19a-af5e996ba16b\" (UID: \"7414f018-4f89-4d33-a19a-af5e996ba16b\") " Dec 13 04:34:33 crc kubenswrapper[5070]: I1213 04:34:33.211146 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7414f018-4f89-4d33-a19a-af5e996ba16b-test-operator-ephemeral-temporary" (OuterVolumeSpecName: "test-operator-ephemeral-temporary") pod "7414f018-4f89-4d33-a19a-af5e996ba16b" (UID: "7414f018-4f89-4d33-a19a-af5e996ba16b"). InnerVolumeSpecName "test-operator-ephemeral-temporary". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 04:34:33 crc kubenswrapper[5070]: I1213 04:34:33.211661 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7414f018-4f89-4d33-a19a-af5e996ba16b-config-data" (OuterVolumeSpecName: "config-data") pod "7414f018-4f89-4d33-a19a-af5e996ba16b" (UID: "7414f018-4f89-4d33-a19a-af5e996ba16b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 04:34:33 crc kubenswrapper[5070]: I1213 04:34:33.215006 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7414f018-4f89-4d33-a19a-af5e996ba16b-kube-api-access-rhq74" (OuterVolumeSpecName: "kube-api-access-rhq74") pod "7414f018-4f89-4d33-a19a-af5e996ba16b" (UID: "7414f018-4f89-4d33-a19a-af5e996ba16b"). InnerVolumeSpecName "kube-api-access-rhq74". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 04:34:33 crc kubenswrapper[5070]: I1213 04:34:33.215078 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage03-crc" (OuterVolumeSpecName: "test-operator-logs") pod "7414f018-4f89-4d33-a19a-af5e996ba16b" (UID: "7414f018-4f89-4d33-a19a-af5e996ba16b"). InnerVolumeSpecName "local-storage03-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 13 04:34:33 crc kubenswrapper[5070]: I1213 04:34:33.215571 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7414f018-4f89-4d33-a19a-af5e996ba16b-test-operator-ephemeral-workdir" (OuterVolumeSpecName: "test-operator-ephemeral-workdir") pod "7414f018-4f89-4d33-a19a-af5e996ba16b" (UID: "7414f018-4f89-4d33-a19a-af5e996ba16b"). 
InnerVolumeSpecName "test-operator-ephemeral-workdir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 04:34:33 crc kubenswrapper[5070]: I1213 04:34:33.241409 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7414f018-4f89-4d33-a19a-af5e996ba16b-ca-certs" (OuterVolumeSpecName: "ca-certs") pod "7414f018-4f89-4d33-a19a-af5e996ba16b" (UID: "7414f018-4f89-4d33-a19a-af5e996ba16b"). InnerVolumeSpecName "ca-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 04:34:33 crc kubenswrapper[5070]: I1213 04:34:33.242546 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7414f018-4f89-4d33-a19a-af5e996ba16b-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "7414f018-4f89-4d33-a19a-af5e996ba16b" (UID: "7414f018-4f89-4d33-a19a-af5e996ba16b"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 04:34:33 crc kubenswrapper[5070]: I1213 04:34:33.266791 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7414f018-4f89-4d33-a19a-af5e996ba16b-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "7414f018-4f89-4d33-a19a-af5e996ba16b" (UID: "7414f018-4f89-4d33-a19a-af5e996ba16b"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 04:34:33 crc kubenswrapper[5070]: I1213 04:34:33.279035 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7414f018-4f89-4d33-a19a-af5e996ba16b-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "7414f018-4f89-4d33-a19a-af5e996ba16b" (UID: "7414f018-4f89-4d33-a19a-af5e996ba16b"). InnerVolumeSpecName "openstack-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 04:34:33 crc kubenswrapper[5070]: I1213 04:34:33.313020 5070 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7414f018-4f89-4d33-a19a-af5e996ba16b-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 13 04:34:33 crc kubenswrapper[5070]: I1213 04:34:33.313336 5070 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7414f018-4f89-4d33-a19a-af5e996ba16b-config-data\") on node \"crc\" DevicePath \"\"" Dec 13 04:34:33 crc kubenswrapper[5070]: I1213 04:34:33.314863 5070 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/7414f018-4f89-4d33-a19a-af5e996ba16b-test-operator-ephemeral-workdir\") on node \"crc\" DevicePath \"\"" Dec 13 04:34:33 crc kubenswrapper[5070]: I1213 04:34:33.314947 5070 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/7414f018-4f89-4d33-a19a-af5e996ba16b-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Dec 13 04:34:33 crc kubenswrapper[5070]: I1213 04:34:33.314968 5070 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/7414f018-4f89-4d33-a19a-af5e996ba16b-test-operator-ephemeral-temporary\") on node \"crc\" DevicePath \"\"" Dec 13 04:34:33 crc kubenswrapper[5070]: I1213 04:34:33.314989 5070 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" " Dec 13 04:34:33 crc kubenswrapper[5070]: I1213 04:34:33.315001 5070 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/7414f018-4f89-4d33-a19a-af5e996ba16b-openstack-config\") on node \"crc\" DevicePath \"\"" Dec 13 04:34:33 crc kubenswrapper[5070]: I1213 04:34:33.315010 5070 reconciler_common.go:293] "Volume detached for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/7414f018-4f89-4d33-a19a-af5e996ba16b-ca-certs\") on node \"crc\" DevicePath \"\"" Dec 13 04:34:33 crc kubenswrapper[5070]: I1213 04:34:33.315018 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rhq74\" (UniqueName: \"kubernetes.io/projected/7414f018-4f89-4d33-a19a-af5e996ba16b-kube-api-access-rhq74\") on node \"crc\" DevicePath \"\"" Dec 13 04:34:33 crc kubenswrapper[5070]: I1213 04:34:33.346734 5070 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage03-crc" (UniqueName: "kubernetes.io/local-volume/local-storage03-crc") on node "crc" Dec 13 04:34:33 crc kubenswrapper[5070]: I1213 04:34:33.417137 5070 reconciler_common.go:293] "Volume detached for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" DevicePath \"\"" Dec 13 04:34:33 crc kubenswrapper[5070]: I1213 04:34:33.683081 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"7414f018-4f89-4d33-a19a-af5e996ba16b","Type":"ContainerDied","Data":"3a79310110f02498db9e3a8c0b25832268fc84944139b04d9c8e13d3a09f089e"} Dec 13 04:34:33 crc kubenswrapper[5070]: I1213 04:34:33.683121 5070 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3a79310110f02498db9e3a8c0b25832268fc84944139b04d9c8e13d3a09f089e" Dec 13 04:34:33 crc 
kubenswrapper[5070]: I1213 04:34:33.683173 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Dec 13 04:34:40 crc kubenswrapper[5070]: I1213 04:34:40.114003 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Dec 13 04:34:40 crc kubenswrapper[5070]: E1213 04:34:40.115495 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3d8c349c-5668-47ae-abde-5e876ef3b846" containerName="extract-utilities" Dec 13 04:34:40 crc kubenswrapper[5070]: I1213 04:34:40.115530 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="3d8c349c-5668-47ae-abde-5e876ef3b846" containerName="extract-utilities" Dec 13 04:34:40 crc kubenswrapper[5070]: E1213 04:34:40.115582 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7414f018-4f89-4d33-a19a-af5e996ba16b" containerName="tempest-tests-tempest-tests-runner" Dec 13 04:34:40 crc kubenswrapper[5070]: I1213 04:34:40.115599 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="7414f018-4f89-4d33-a19a-af5e996ba16b" containerName="tempest-tests-tempest-tests-runner" Dec 13 04:34:40 crc kubenswrapper[5070]: E1213 04:34:40.115639 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3d8c349c-5668-47ae-abde-5e876ef3b846" containerName="extract-content" Dec 13 04:34:40 crc kubenswrapper[5070]: I1213 04:34:40.115658 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="3d8c349c-5668-47ae-abde-5e876ef3b846" containerName="extract-content" Dec 13 04:34:40 crc kubenswrapper[5070]: E1213 04:34:40.115687 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3d8c349c-5668-47ae-abde-5e876ef3b846" containerName="registry-server" Dec 13 04:34:40 crc kubenswrapper[5070]: I1213 04:34:40.115703 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="3d8c349c-5668-47ae-abde-5e876ef3b846" containerName="registry-server" Dec 13 04:34:40 crc kubenswrapper[5070]: I1213 04:34:40.116238 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="7414f018-4f89-4d33-a19a-af5e996ba16b" containerName="tempest-tests-tempest-tests-runner" Dec 13 04:34:40 crc kubenswrapper[5070]: I1213 04:34:40.116282 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="3d8c349c-5668-47ae-abde-5e876ef3b846" containerName="registry-server" Dec 13 04:34:40 crc kubenswrapper[5070]: I1213 04:34:40.117655 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 13 04:34:40 crc kubenswrapper[5070]: I1213 04:34:40.122232 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Dec 13 04:34:40 crc kubenswrapper[5070]: I1213 04:34:40.125125 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-l2j7l" Dec 13 04:34:40 crc kubenswrapper[5070]: I1213 04:34:40.163325 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ljpfb\" (UniqueName: \"kubernetes.io/projected/3e4d60ee-2d7b-4caf-9cfd-ace33894a93f-kube-api-access-ljpfb\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"3e4d60ee-2d7b-4caf-9cfd-ace33894a93f\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 13 04:34:40 crc kubenswrapper[5070]: I1213 04:34:40.163504 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"3e4d60ee-2d7b-4caf-9cfd-ace33894a93f\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 13 04:34:40 crc kubenswrapper[5070]: I1213 04:34:40.264628 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ljpfb\" (UniqueName: \"kubernetes.io/projected/3e4d60ee-2d7b-4caf-9cfd-ace33894a93f-kube-api-access-ljpfb\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"3e4d60ee-2d7b-4caf-9cfd-ace33894a93f\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 13 04:34:40 crc kubenswrapper[5070]: I1213 04:34:40.264700 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"3e4d60ee-2d7b-4caf-9cfd-ace33894a93f\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 13 04:34:40 crc kubenswrapper[5070]: I1213 04:34:40.265160 5070 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"3e4d60ee-2d7b-4caf-9cfd-ace33894a93f\") device mount path \"/mnt/openstack/pv03\"" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 13 04:34:40 crc kubenswrapper[5070]: I1213 04:34:40.299862 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ljpfb\" (UniqueName: \"kubernetes.io/projected/3e4d60ee-2d7b-4caf-9cfd-ace33894a93f-kube-api-access-ljpfb\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"3e4d60ee-2d7b-4caf-9cfd-ace33894a93f\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 13 04:34:40 crc kubenswrapper[5070]: I1213 04:34:40.302242 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"3e4d60ee-2d7b-4caf-9cfd-ace33894a93f\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 13 04:34:40 crc 
kubenswrapper[5070]: I1213 04:34:40.463840 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 13 04:34:40 crc kubenswrapper[5070]: I1213 04:34:40.966289 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Dec 13 04:34:41 crc kubenswrapper[5070]: I1213 04:34:41.771373 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"3e4d60ee-2d7b-4caf-9cfd-ace33894a93f","Type":"ContainerStarted","Data":"d26005cc474153737583bf357ff3202125a6c46a9794e4b2d6df02c8e60d6e05"} Dec 13 04:34:42 crc kubenswrapper[5070]: I1213 04:34:42.781940 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"3e4d60ee-2d7b-4caf-9cfd-ace33894a93f","Type":"ContainerStarted","Data":"f4b9a042a442d4ee3d33b8b68093c176faec90b2b8dce2a297d515263d7dd970"} Dec 13 04:34:42 crc kubenswrapper[5070]: I1213 04:34:42.808499 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" podStartSLOduration=1.275508301 podStartE2EDuration="2.808472115s" podCreationTimestamp="2025-12-13 04:34:40 +0000 UTC" firstStartedPulling="2025-12-13 04:34:40.970582848 +0000 UTC m=+4973.206426414" lastFinishedPulling="2025-12-13 04:34:42.503546682 +0000 UTC m=+4974.739390228" observedRunningTime="2025-12-13 04:34:42.801422824 +0000 UTC m=+4975.037266380" watchObservedRunningTime="2025-12-13 04:34:42.808472115 +0000 UTC m=+4975.044315681" Dec 13 04:35:23 crc kubenswrapper[5070]: I1213 04:35:23.521021 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-vqfsl/must-gather-22p7k"] Dec 13 04:35:23 crc kubenswrapper[5070]: I1213 04:35:23.523271 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-vqfsl/must-gather-22p7k" Dec 13 04:35:23 crc kubenswrapper[5070]: I1213 04:35:23.524835 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-vqfsl"/"openshift-service-ca.crt" Dec 13 04:35:23 crc kubenswrapper[5070]: I1213 04:35:23.526976 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/178ec5fe-9225-4e5a-ac31-fe1ee49042a5-must-gather-output\") pod \"must-gather-22p7k\" (UID: \"178ec5fe-9225-4e5a-ac31-fe1ee49042a5\") " pod="openshift-must-gather-vqfsl/must-gather-22p7k" Dec 13 04:35:23 crc kubenswrapper[5070]: I1213 04:35:23.527075 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hp9wd\" (UniqueName: \"kubernetes.io/projected/178ec5fe-9225-4e5a-ac31-fe1ee49042a5-kube-api-access-hp9wd\") pod \"must-gather-22p7k\" (UID: \"178ec5fe-9225-4e5a-ac31-fe1ee49042a5\") " pod="openshift-must-gather-vqfsl/must-gather-22p7k" Dec 13 04:35:23 crc kubenswrapper[5070]: I1213 04:35:23.528062 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-vqfsl"/"default-dockercfg-p2lxw" Dec 13 04:35:23 crc kubenswrapper[5070]: I1213 04:35:23.528308 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-vqfsl"/"kube-root-ca.crt" Dec 13 04:35:23 crc kubenswrapper[5070]: I1213 04:35:23.532754 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-vqfsl/must-gather-22p7k"] Dec 13 04:35:23 crc kubenswrapper[5070]: I1213 04:35:23.629196 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/178ec5fe-9225-4e5a-ac31-fe1ee49042a5-must-gather-output\") pod \"must-gather-22p7k\" (UID: \"178ec5fe-9225-4e5a-ac31-fe1ee49042a5\") " pod="openshift-must-gather-vqfsl/must-gather-22p7k" Dec 13 04:35:23 crc kubenswrapper[5070]: I1213 04:35:23.629267 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hp9wd\" (UniqueName: \"kubernetes.io/projected/178ec5fe-9225-4e5a-ac31-fe1ee49042a5-kube-api-access-hp9wd\") pod \"must-gather-22p7k\" (UID: \"178ec5fe-9225-4e5a-ac31-fe1ee49042a5\") " pod="openshift-must-gather-vqfsl/must-gather-22p7k" Dec 13 04:35:23 crc kubenswrapper[5070]: I1213 04:35:23.629607 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/178ec5fe-9225-4e5a-ac31-fe1ee49042a5-must-gather-output\") pod \"must-gather-22p7k\" (UID: \"178ec5fe-9225-4e5a-ac31-fe1ee49042a5\") " pod="openshift-must-gather-vqfsl/must-gather-22p7k" Dec 13 04:35:23 crc kubenswrapper[5070]: I1213 04:35:23.657277 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hp9wd\" (UniqueName: \"kubernetes.io/projected/178ec5fe-9225-4e5a-ac31-fe1ee49042a5-kube-api-access-hp9wd\") pod \"must-gather-22p7k\" (UID: \"178ec5fe-9225-4e5a-ac31-fe1ee49042a5\") " pod="openshift-must-gather-vqfsl/must-gather-22p7k" Dec 13 04:35:23 crc kubenswrapper[5070]: I1213 04:35:23.848432 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-vqfsl/must-gather-22p7k" Dec 13 04:35:24 crc kubenswrapper[5070]: I1213 04:35:24.254199 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-vqfsl/must-gather-22p7k"] Dec 13 04:35:24 crc kubenswrapper[5070]: I1213 04:35:24.255356 5070 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 13 04:35:24 crc kubenswrapper[5070]: I1213 04:35:24.343179 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-vqfsl/must-gather-22p7k" event={"ID":"178ec5fe-9225-4e5a-ac31-fe1ee49042a5","Type":"ContainerStarted","Data":"cfbaff7855c6b0d936dfd0fa90097d8d203c5b94e56ce1cd252eb810f2a05e81"} Dec 13 04:35:33 crc kubenswrapper[5070]: I1213 04:35:33.448636 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-vqfsl/must-gather-22p7k" event={"ID":"178ec5fe-9225-4e5a-ac31-fe1ee49042a5","Type":"ContainerStarted","Data":"c0f8ac805ded9e54a0acf1e095b78ffde8fdb7fdbcaa1076ef8b6aae8f085456"} Dec 13 04:35:34 crc kubenswrapper[5070]: I1213 04:35:34.460226 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-vqfsl/must-gather-22p7k" event={"ID":"178ec5fe-9225-4e5a-ac31-fe1ee49042a5","Type":"ContainerStarted","Data":"37a9c6fe72feb0613e9c4a0fe8cd6a0846b9627b222fcb9c8a5b104ef17e6a0d"} Dec 13 04:35:34 crc kubenswrapper[5070]: I1213 04:35:34.486251 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-vqfsl/must-gather-22p7k" podStartSLOduration=2.774114506 podStartE2EDuration="11.48621222s" podCreationTimestamp="2025-12-13 04:35:23 +0000 UTC" firstStartedPulling="2025-12-13 04:35:24.255182072 +0000 UTC m=+5016.491025608" lastFinishedPulling="2025-12-13 04:35:32.967279736 +0000 UTC m=+5025.203123322" observedRunningTime="2025-12-13 04:35:34.481348888 +0000 UTC m=+5026.717192464" watchObservedRunningTime="2025-12-13 04:35:34.48621222 +0000 UTC m=+5026.722055786" Dec 13 04:35:36 crc kubenswrapper[5070]: E1213 04:35:36.875090 5070 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.227:57692->38.102.83.227:38159: write tcp 38.102.83.227:57692->38.102.83.227:38159: write: broken pipe Dec 13 04:35:38 crc kubenswrapper[5070]: I1213 04:35:38.633179 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-vqfsl/crc-debug-hjfwp"] Dec 13 04:35:38 crc kubenswrapper[5070]: I1213 04:35:38.634896 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-vqfsl/crc-debug-hjfwp" Dec 13 04:35:38 crc kubenswrapper[5070]: I1213 04:35:38.702115 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/cb63f73e-b42e-407e-8d75-b8cfd41f6bd7-host\") pod \"crc-debug-hjfwp\" (UID: \"cb63f73e-b42e-407e-8d75-b8cfd41f6bd7\") " pod="openshift-must-gather-vqfsl/crc-debug-hjfwp" Dec 13 04:35:38 crc kubenswrapper[5070]: I1213 04:35:38.702404 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f5p68\" (UniqueName: \"kubernetes.io/projected/cb63f73e-b42e-407e-8d75-b8cfd41f6bd7-kube-api-access-f5p68\") pod \"crc-debug-hjfwp\" (UID: \"cb63f73e-b42e-407e-8d75-b8cfd41f6bd7\") " pod="openshift-must-gather-vqfsl/crc-debug-hjfwp" Dec 13 04:35:38 crc kubenswrapper[5070]: I1213 04:35:38.803880 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/cb63f73e-b42e-407e-8d75-b8cfd41f6bd7-host\") pod \"crc-debug-hjfwp\" (UID: \"cb63f73e-b42e-407e-8d75-b8cfd41f6bd7\") " pod="openshift-must-gather-vqfsl/crc-debug-hjfwp" Dec 13 04:35:38 crc kubenswrapper[5070]: I1213 04:35:38.803982 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f5p68\" (UniqueName: \"kubernetes.io/projected/cb63f73e-b42e-407e-8d75-b8cfd41f6bd7-kube-api-access-f5p68\") pod \"crc-debug-hjfwp\" (UID: \"cb63f73e-b42e-407e-8d75-b8cfd41f6bd7\") " pod="openshift-must-gather-vqfsl/crc-debug-hjfwp" Dec 13 04:35:38 crc kubenswrapper[5070]: I1213 04:35:38.804014 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/cb63f73e-b42e-407e-8d75-b8cfd41f6bd7-host\") pod \"crc-debug-hjfwp\" (UID: \"cb63f73e-b42e-407e-8d75-b8cfd41f6bd7\") " pod="openshift-must-gather-vqfsl/crc-debug-hjfwp" Dec 13 04:35:38 crc kubenswrapper[5070]: I1213 04:35:38.820472 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f5p68\" (UniqueName: \"kubernetes.io/projected/cb63f73e-b42e-407e-8d75-b8cfd41f6bd7-kube-api-access-f5p68\") pod \"crc-debug-hjfwp\" (UID: \"cb63f73e-b42e-407e-8d75-b8cfd41f6bd7\") " pod="openshift-must-gather-vqfsl/crc-debug-hjfwp" Dec 13 04:35:38 crc kubenswrapper[5070]: I1213 04:35:38.956751 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-vqfsl/crc-debug-hjfwp" Dec 13 04:35:39 crc kubenswrapper[5070]: I1213 04:35:39.513435 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-vqfsl/crc-debug-hjfwp" event={"ID":"cb63f73e-b42e-407e-8d75-b8cfd41f6bd7","Type":"ContainerStarted","Data":"cccd2e83d47c5bbf0995e1fc6826dac6e22d611d0c5711632de9fa787137ae1e"} Dec 13 04:35:50 crc kubenswrapper[5070]: I1213 04:35:50.622697 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-vqfsl/crc-debug-hjfwp" event={"ID":"cb63f73e-b42e-407e-8d75-b8cfd41f6bd7","Type":"ContainerStarted","Data":"0aac63537f4cba329593210eb44f77b76995fdd519be63701c809c814266788d"} Dec 13 04:35:50 crc kubenswrapper[5070]: I1213 04:35:50.644090 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-vqfsl/crc-debug-hjfwp" podStartSLOduration=1.6129313509999998 podStartE2EDuration="12.644074878s" podCreationTimestamp="2025-12-13 04:35:38 +0000 UTC" firstStartedPulling="2025-12-13 04:35:38.987087811 +0000 UTC m=+5031.222931377" lastFinishedPulling="2025-12-13 04:35:50.018231358 +0000 UTC m=+5042.254074904" observedRunningTime="2025-12-13 04:35:50.642672191 +0000 UTC m=+5042.878515737" watchObservedRunningTime="2025-12-13 04:35:50.644074878 +0000 UTC m=+5042.879918424" Dec 13 04:35:51 crc kubenswrapper[5070]: I1213 04:35:51.943063 5070 patch_prober.go:28] interesting pod/machine-config-daemon-9l4rb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 13 04:35:51 crc kubenswrapper[5070]: I1213 04:35:51.943618 5070 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 13 04:36:21 crc kubenswrapper[5070]: I1213 04:36:21.943269 5070 patch_prober.go:28] interesting pod/machine-config-daemon-9l4rb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 13 04:36:21 crc kubenswrapper[5070]: I1213 04:36:21.943848 5070 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 13 04:36:42 crc kubenswrapper[5070]: I1213 04:36:42.129423 5070 generic.go:334] "Generic (PLEG): container finished" podID="cb63f73e-b42e-407e-8d75-b8cfd41f6bd7" containerID="0aac63537f4cba329593210eb44f77b76995fdd519be63701c809c814266788d" exitCode=0 Dec 13 04:36:42 crc kubenswrapper[5070]: I1213 04:36:42.129526 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-vqfsl/crc-debug-hjfwp" event={"ID":"cb63f73e-b42e-407e-8d75-b8cfd41f6bd7","Type":"ContainerDied","Data":"0aac63537f4cba329593210eb44f77b76995fdd519be63701c809c814266788d"} Dec 13 04:36:43 crc kubenswrapper[5070]: I1213 04:36:43.279425 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-vqfsl/crc-debug-hjfwp" Dec 13 04:36:43 crc kubenswrapper[5070]: I1213 04:36:43.328191 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-vqfsl/crc-debug-hjfwp"] Dec 13 04:36:43 crc kubenswrapper[5070]: I1213 04:36:43.336430 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-vqfsl/crc-debug-hjfwp"] Dec 13 04:36:43 crc kubenswrapper[5070]: I1213 04:36:43.434689 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f5p68\" (UniqueName: \"kubernetes.io/projected/cb63f73e-b42e-407e-8d75-b8cfd41f6bd7-kube-api-access-f5p68\") pod \"cb63f73e-b42e-407e-8d75-b8cfd41f6bd7\" (UID: \"cb63f73e-b42e-407e-8d75-b8cfd41f6bd7\") " Dec 13 04:36:43 crc kubenswrapper[5070]: I1213 04:36:43.434769 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/cb63f73e-b42e-407e-8d75-b8cfd41f6bd7-host\") pod \"cb63f73e-b42e-407e-8d75-b8cfd41f6bd7\" (UID: \"cb63f73e-b42e-407e-8d75-b8cfd41f6bd7\") " Dec 13 04:36:43 crc kubenswrapper[5070]: I1213 04:36:43.434999 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/cb63f73e-b42e-407e-8d75-b8cfd41f6bd7-host" (OuterVolumeSpecName: "host") pod "cb63f73e-b42e-407e-8d75-b8cfd41f6bd7" (UID: "cb63f73e-b42e-407e-8d75-b8cfd41f6bd7"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 13 04:36:43 crc kubenswrapper[5070]: I1213 04:36:43.435616 5070 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/cb63f73e-b42e-407e-8d75-b8cfd41f6bd7-host\") on node \"crc\" DevicePath \"\"" Dec 13 04:36:43 crc kubenswrapper[5070]: I1213 04:36:43.889021 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cb63f73e-b42e-407e-8d75-b8cfd41f6bd7-kube-api-access-f5p68" (OuterVolumeSpecName: "kube-api-access-f5p68") pod "cb63f73e-b42e-407e-8d75-b8cfd41f6bd7" (UID: "cb63f73e-b42e-407e-8d75-b8cfd41f6bd7"). InnerVolumeSpecName "kube-api-access-f5p68". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 04:36:43 crc kubenswrapper[5070]: I1213 04:36:43.945830 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f5p68\" (UniqueName: \"kubernetes.io/projected/cb63f73e-b42e-407e-8d75-b8cfd41f6bd7-kube-api-access-f5p68\") on node \"crc\" DevicePath \"\"" Dec 13 04:36:44 crc kubenswrapper[5070]: I1213 04:36:44.149576 5070 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cccd2e83d47c5bbf0995e1fc6826dac6e22d611d0c5711632de9fa787137ae1e" Dec 13 04:36:44 crc kubenswrapper[5070]: I1213 04:36:44.149668 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-vqfsl/crc-debug-hjfwp" Dec 13 04:36:44 crc kubenswrapper[5070]: I1213 04:36:44.181853 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cb63f73e-b42e-407e-8d75-b8cfd41f6bd7" path="/var/lib/kubelet/pods/cb63f73e-b42e-407e-8d75-b8cfd41f6bd7/volumes" Dec 13 04:36:45 crc kubenswrapper[5070]: I1213 04:36:45.100935 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-vqfsl/crc-debug-b7nj8"] Dec 13 04:36:45 crc kubenswrapper[5070]: E1213 04:36:45.101325 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cb63f73e-b42e-407e-8d75-b8cfd41f6bd7" containerName="container-00" Dec 13 04:36:45 crc kubenswrapper[5070]: I1213 04:36:45.101338 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="cb63f73e-b42e-407e-8d75-b8cfd41f6bd7" containerName="container-00" Dec 13 04:36:45 crc kubenswrapper[5070]: I1213 04:36:45.101552 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="cb63f73e-b42e-407e-8d75-b8cfd41f6bd7" containerName="container-00" Dec 13 04:36:45 crc kubenswrapper[5070]: I1213 04:36:45.102122 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-vqfsl/crc-debug-b7nj8" Dec 13 04:36:45 crc kubenswrapper[5070]: I1213 04:36:45.169041 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/058a2d61-481d-4071-ae35-dcb193c2126d-host\") pod \"crc-debug-b7nj8\" (UID: \"058a2d61-481d-4071-ae35-dcb193c2126d\") " pod="openshift-must-gather-vqfsl/crc-debug-b7nj8" Dec 13 04:36:45 crc kubenswrapper[5070]: I1213 04:36:45.169344 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6hgbh\" (UniqueName: \"kubernetes.io/projected/058a2d61-481d-4071-ae35-dcb193c2126d-kube-api-access-6hgbh\") pod \"crc-debug-b7nj8\" (UID: \"058a2d61-481d-4071-ae35-dcb193c2126d\") " pod="openshift-must-gather-vqfsl/crc-debug-b7nj8" Dec 13 04:36:45 crc kubenswrapper[5070]: I1213 04:36:45.271819 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/058a2d61-481d-4071-ae35-dcb193c2126d-host\") pod \"crc-debug-b7nj8\" (UID: \"058a2d61-481d-4071-ae35-dcb193c2126d\") " pod="openshift-must-gather-vqfsl/crc-debug-b7nj8" Dec 13 04:36:45 crc kubenswrapper[5070]: I1213 04:36:45.272014 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/058a2d61-481d-4071-ae35-dcb193c2126d-host\") pod \"crc-debug-b7nj8\" (UID: \"058a2d61-481d-4071-ae35-dcb193c2126d\") " pod="openshift-must-gather-vqfsl/crc-debug-b7nj8" Dec 13 04:36:45 crc kubenswrapper[5070]: I1213 04:36:45.272154 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6hgbh\" (UniqueName: \"kubernetes.io/projected/058a2d61-481d-4071-ae35-dcb193c2126d-kube-api-access-6hgbh\") pod \"crc-debug-b7nj8\" (UID: \"058a2d61-481d-4071-ae35-dcb193c2126d\") " pod="openshift-must-gather-vqfsl/crc-debug-b7nj8" Dec 13 04:36:45 crc kubenswrapper[5070]: I1213 04:36:45.307598 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6hgbh\" (UniqueName: \"kubernetes.io/projected/058a2d61-481d-4071-ae35-dcb193c2126d-kube-api-access-6hgbh\") pod \"crc-debug-b7nj8\" (UID: \"058a2d61-481d-4071-ae35-dcb193c2126d\") " 
pod="openshift-must-gather-vqfsl/crc-debug-b7nj8" Dec 13 04:36:45 crc kubenswrapper[5070]: I1213 04:36:45.426597 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-vqfsl/crc-debug-b7nj8" Dec 13 04:36:46 crc kubenswrapper[5070]: I1213 04:36:46.168002 5070 generic.go:334] "Generic (PLEG): container finished" podID="058a2d61-481d-4071-ae35-dcb193c2126d" containerID="a50f0ec15f093810b3c459975ae88ae507b869884b68929a909c26de62d9bd82" exitCode=0 Dec 13 04:36:46 crc kubenswrapper[5070]: I1213 04:36:46.181674 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-vqfsl/crc-debug-b7nj8" event={"ID":"058a2d61-481d-4071-ae35-dcb193c2126d","Type":"ContainerDied","Data":"a50f0ec15f093810b3c459975ae88ae507b869884b68929a909c26de62d9bd82"} Dec 13 04:36:46 crc kubenswrapper[5070]: I1213 04:36:46.181732 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-vqfsl/crc-debug-b7nj8" event={"ID":"058a2d61-481d-4071-ae35-dcb193c2126d","Type":"ContainerStarted","Data":"6b758166907944b25078b8779608c2eab45095065b587da3ffbb2ae97359e463"} Dec 13 04:36:47 crc kubenswrapper[5070]: I1213 04:36:47.280675 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-vqfsl/crc-debug-b7nj8" Dec 13 04:36:47 crc kubenswrapper[5070]: I1213 04:36:47.415115 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/058a2d61-481d-4071-ae35-dcb193c2126d-host\") pod \"058a2d61-481d-4071-ae35-dcb193c2126d\" (UID: \"058a2d61-481d-4071-ae35-dcb193c2126d\") " Dec 13 04:36:47 crc kubenswrapper[5070]: I1213 04:36:47.415185 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6hgbh\" (UniqueName: \"kubernetes.io/projected/058a2d61-481d-4071-ae35-dcb193c2126d-kube-api-access-6hgbh\") pod \"058a2d61-481d-4071-ae35-dcb193c2126d\" (UID: \"058a2d61-481d-4071-ae35-dcb193c2126d\") " Dec 13 04:36:47 crc kubenswrapper[5070]: I1213 04:36:47.415369 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/058a2d61-481d-4071-ae35-dcb193c2126d-host" (OuterVolumeSpecName: "host") pod "058a2d61-481d-4071-ae35-dcb193c2126d" (UID: "058a2d61-481d-4071-ae35-dcb193c2126d"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 13 04:36:47 crc kubenswrapper[5070]: I1213 04:36:47.415719 5070 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/058a2d61-481d-4071-ae35-dcb193c2126d-host\") on node \"crc\" DevicePath \"\"" Dec 13 04:36:47 crc kubenswrapper[5070]: I1213 04:36:47.422797 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/058a2d61-481d-4071-ae35-dcb193c2126d-kube-api-access-6hgbh" (OuterVolumeSpecName: "kube-api-access-6hgbh") pod "058a2d61-481d-4071-ae35-dcb193c2126d" (UID: "058a2d61-481d-4071-ae35-dcb193c2126d"). InnerVolumeSpecName "kube-api-access-6hgbh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 04:36:47 crc kubenswrapper[5070]: I1213 04:36:47.518769 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6hgbh\" (UniqueName: \"kubernetes.io/projected/058a2d61-481d-4071-ae35-dcb193c2126d-kube-api-access-6hgbh\") on node \"crc\" DevicePath \"\"" Dec 13 04:36:48 crc kubenswrapper[5070]: I1213 04:36:48.197230 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-vqfsl/crc-debug-b7nj8" Dec 13 04:36:48 crc kubenswrapper[5070]: I1213 04:36:48.199302 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-vqfsl/crc-debug-b7nj8" event={"ID":"058a2d61-481d-4071-ae35-dcb193c2126d","Type":"ContainerDied","Data":"6b758166907944b25078b8779608c2eab45095065b587da3ffbb2ae97359e463"} Dec 13 04:36:48 crc kubenswrapper[5070]: I1213 04:36:48.199354 5070 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6b758166907944b25078b8779608c2eab45095065b587da3ffbb2ae97359e463" Dec 13 04:36:48 crc kubenswrapper[5070]: E1213 04:36:48.602664 5070 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod058a2d61_481d_4071_ae35_dcb193c2126d.slice\": RecentStats: unable to find data in memory cache]" Dec 13 04:36:48 crc kubenswrapper[5070]: I1213 04:36:48.900387 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-vqfsl/crc-debug-b7nj8"] Dec 13 04:36:48 crc kubenswrapper[5070]: I1213 04:36:48.909480 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-vqfsl/crc-debug-b7nj8"] Dec 13 04:36:50 crc kubenswrapper[5070]: I1213 04:36:50.120009 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-vqfsl/crc-debug-n8lwq"] Dec 13 04:36:50 crc kubenswrapper[5070]: E1213 04:36:50.120750 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="058a2d61-481d-4071-ae35-dcb193c2126d" containerName="container-00" Dec 13 04:36:50 crc kubenswrapper[5070]: I1213 04:36:50.120763 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="058a2d61-481d-4071-ae35-dcb193c2126d" containerName="container-00" Dec 13 04:36:50 crc kubenswrapper[5070]: I1213 04:36:50.120969 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="058a2d61-481d-4071-ae35-dcb193c2126d" containerName="container-00" Dec 13 04:36:50 crc kubenswrapper[5070]: I1213 04:36:50.121799 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-vqfsl/crc-debug-n8lwq" Dec 13 04:36:50 crc kubenswrapper[5070]: I1213 04:36:50.130663 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zpf66\" (UniqueName: \"kubernetes.io/projected/400a47b5-51be-4f86-a6a0-360d6b05817c-kube-api-access-zpf66\") pod \"crc-debug-n8lwq\" (UID: \"400a47b5-51be-4f86-a6a0-360d6b05817c\") " pod="openshift-must-gather-vqfsl/crc-debug-n8lwq" Dec 13 04:36:50 crc kubenswrapper[5070]: I1213 04:36:50.131155 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/400a47b5-51be-4f86-a6a0-360d6b05817c-host\") pod \"crc-debug-n8lwq\" (UID: \"400a47b5-51be-4f86-a6a0-360d6b05817c\") " pod="openshift-must-gather-vqfsl/crc-debug-n8lwq" Dec 13 04:36:50 crc kubenswrapper[5070]: I1213 04:36:50.180136 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="058a2d61-481d-4071-ae35-dcb193c2126d" path="/var/lib/kubelet/pods/058a2d61-481d-4071-ae35-dcb193c2126d/volumes" Dec 13 04:36:50 crc kubenswrapper[5070]: I1213 04:36:50.232936 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/400a47b5-51be-4f86-a6a0-360d6b05817c-host\") pod \"crc-debug-n8lwq\" (UID: \"400a47b5-51be-4f86-a6a0-360d6b05817c\") " pod="openshift-must-gather-vqfsl/crc-debug-n8lwq" Dec 13 04:36:50 crc kubenswrapper[5070]: I1213 04:36:50.233069 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zpf66\" (UniqueName: \"kubernetes.io/projected/400a47b5-51be-4f86-a6a0-360d6b05817c-kube-api-access-zpf66\") pod \"crc-debug-n8lwq\" (UID: \"400a47b5-51be-4f86-a6a0-360d6b05817c\") " pod="openshift-must-gather-vqfsl/crc-debug-n8lwq" Dec 13 04:36:50 crc kubenswrapper[5070]: I1213 04:36:50.233075 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/400a47b5-51be-4f86-a6a0-360d6b05817c-host\") pod \"crc-debug-n8lwq\" (UID: \"400a47b5-51be-4f86-a6a0-360d6b05817c\") " pod="openshift-must-gather-vqfsl/crc-debug-n8lwq" Dec 13 04:36:50 crc kubenswrapper[5070]: I1213 04:36:50.259640 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zpf66\" (UniqueName: \"kubernetes.io/projected/400a47b5-51be-4f86-a6a0-360d6b05817c-kube-api-access-zpf66\") pod \"crc-debug-n8lwq\" (UID: \"400a47b5-51be-4f86-a6a0-360d6b05817c\") " pod="openshift-must-gather-vqfsl/crc-debug-n8lwq" Dec 13 04:36:50 crc kubenswrapper[5070]: I1213 04:36:50.440904 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-vqfsl/crc-debug-n8lwq" Dec 13 04:36:50 crc kubenswrapper[5070]: W1213 04:36:50.488140 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod400a47b5_51be_4f86_a6a0_360d6b05817c.slice/crio-cc8616462d64970b57c56cdda766a8994829efe573a6693b35d7258c26419658 WatchSource:0}: Error finding container cc8616462d64970b57c56cdda766a8994829efe573a6693b35d7258c26419658: Status 404 returned error can't find the container with id cc8616462d64970b57c56cdda766a8994829efe573a6693b35d7258c26419658 Dec 13 04:36:51 crc kubenswrapper[5070]: I1213 04:36:51.345502 5070 generic.go:334] "Generic (PLEG): container finished" podID="400a47b5-51be-4f86-a6a0-360d6b05817c" containerID="2f4927d7e5d05b3bfaeb38336c580632bffc072912d9dc7fee6dabc1015af8da" exitCode=0 Dec 13 04:36:51 crc kubenswrapper[5070]: I1213 04:36:51.345743 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-vqfsl/crc-debug-n8lwq" event={"ID":"400a47b5-51be-4f86-a6a0-360d6b05817c","Type":"ContainerDied","Data":"2f4927d7e5d05b3bfaeb38336c580632bffc072912d9dc7fee6dabc1015af8da"} Dec 13 04:36:51 crc kubenswrapper[5070]: I1213 04:36:51.345882 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-vqfsl/crc-debug-n8lwq" event={"ID":"400a47b5-51be-4f86-a6a0-360d6b05817c","Type":"ContainerStarted","Data":"cc8616462d64970b57c56cdda766a8994829efe573a6693b35d7258c26419658"} Dec 13 04:36:51 crc kubenswrapper[5070]: I1213 04:36:51.403981 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-vqfsl/crc-debug-n8lwq"] Dec 13 04:36:51 crc kubenswrapper[5070]: I1213 04:36:51.414746 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-vqfsl/crc-debug-n8lwq"] Dec 13 04:36:51 crc kubenswrapper[5070]: I1213 04:36:51.943209 5070 patch_prober.go:28] interesting pod/machine-config-daemon-9l4rb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 13 04:36:51 crc kubenswrapper[5070]: I1213 04:36:51.943684 5070 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 13 04:36:51 crc kubenswrapper[5070]: I1213 04:36:51.943745 5070 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" Dec 13 04:36:51 crc kubenswrapper[5070]: I1213 04:36:51.944630 5070 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"c34f4a52bb9647b9f7bcdab3043ab312c6ab456ed8b7b789d6017a360b184ec2"} pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 13 04:36:51 crc kubenswrapper[5070]: I1213 04:36:51.944705 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" containerName="machine-config-daemon" 
containerID="cri-o://c34f4a52bb9647b9f7bcdab3043ab312c6ab456ed8b7b789d6017a360b184ec2" gracePeriod=600 Dec 13 04:36:52 crc kubenswrapper[5070]: I1213 04:36:52.361350 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-lq74q"] Dec 13 04:36:52 crc kubenswrapper[5070]: E1213 04:36:52.362570 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="400a47b5-51be-4f86-a6a0-360d6b05817c" containerName="container-00" Dec 13 04:36:52 crc kubenswrapper[5070]: I1213 04:36:52.362591 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="400a47b5-51be-4f86-a6a0-360d6b05817c" containerName="container-00" Dec 13 04:36:52 crc kubenswrapper[5070]: I1213 04:36:52.363095 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="400a47b5-51be-4f86-a6a0-360d6b05817c" containerName="container-00" Dec 13 04:36:52 crc kubenswrapper[5070]: I1213 04:36:52.364059 5070 generic.go:334] "Generic (PLEG): container finished" podID="a2e447c7-5901-414f-af96-69441d4750db" containerID="c34f4a52bb9647b9f7bcdab3043ab312c6ab456ed8b7b789d6017a360b184ec2" exitCode=0 Dec 13 04:36:52 crc kubenswrapper[5070]: I1213 04:36:52.365843 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" event={"ID":"a2e447c7-5901-414f-af96-69441d4750db","Type":"ContainerDied","Data":"c34f4a52bb9647b9f7bcdab3043ab312c6ab456ed8b7b789d6017a360b184ec2"} Dec 13 04:36:52 crc kubenswrapper[5070]: I1213 04:36:52.365889 5070 scope.go:117] "RemoveContainer" containerID="48ff8357a2b3ee4e83f57e2cb975421a99bd1b2855b387a2135e4094c2c1ae0e" Dec 13 04:36:52 crc kubenswrapper[5070]: I1213 04:36:52.366057 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-lq74q" Dec 13 04:36:52 crc kubenswrapper[5070]: I1213 04:36:52.405546 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-lq74q"] Dec 13 04:36:52 crc kubenswrapper[5070]: I1213 04:36:52.485959 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z9p4c\" (UniqueName: \"kubernetes.io/projected/5bff6e2c-8357-4b44-854d-8ff82897ee24-kube-api-access-z9p4c\") pod \"community-operators-lq74q\" (UID: \"5bff6e2c-8357-4b44-854d-8ff82897ee24\") " pod="openshift-marketplace/community-operators-lq74q" Dec 13 04:36:52 crc kubenswrapper[5070]: I1213 04:36:52.486167 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5bff6e2c-8357-4b44-854d-8ff82897ee24-catalog-content\") pod \"community-operators-lq74q\" (UID: \"5bff6e2c-8357-4b44-854d-8ff82897ee24\") " pod="openshift-marketplace/community-operators-lq74q" Dec 13 04:36:52 crc kubenswrapper[5070]: I1213 04:36:52.486279 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5bff6e2c-8357-4b44-854d-8ff82897ee24-utilities\") pod \"community-operators-lq74q\" (UID: \"5bff6e2c-8357-4b44-854d-8ff82897ee24\") " pod="openshift-marketplace/community-operators-lq74q" Dec 13 04:36:52 crc kubenswrapper[5070]: I1213 04:36:52.487057 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-vqfsl/crc-debug-n8lwq" Dec 13 04:36:52 crc kubenswrapper[5070]: I1213 04:36:52.587144 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zpf66\" (UniqueName: \"kubernetes.io/projected/400a47b5-51be-4f86-a6a0-360d6b05817c-kube-api-access-zpf66\") pod \"400a47b5-51be-4f86-a6a0-360d6b05817c\" (UID: \"400a47b5-51be-4f86-a6a0-360d6b05817c\") " Dec 13 04:36:52 crc kubenswrapper[5070]: I1213 04:36:52.587211 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/400a47b5-51be-4f86-a6a0-360d6b05817c-host\") pod \"400a47b5-51be-4f86-a6a0-360d6b05817c\" (UID: \"400a47b5-51be-4f86-a6a0-360d6b05817c\") " Dec 13 04:36:52 crc kubenswrapper[5070]: I1213 04:36:52.587334 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/400a47b5-51be-4f86-a6a0-360d6b05817c-host" (OuterVolumeSpecName: "host") pod "400a47b5-51be-4f86-a6a0-360d6b05817c" (UID: "400a47b5-51be-4f86-a6a0-360d6b05817c"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 13 04:36:52 crc kubenswrapper[5070]: I1213 04:36:52.587589 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5bff6e2c-8357-4b44-854d-8ff82897ee24-catalog-content\") pod \"community-operators-lq74q\" (UID: \"5bff6e2c-8357-4b44-854d-8ff82897ee24\") " pod="openshift-marketplace/community-operators-lq74q" Dec 13 04:36:52 crc kubenswrapper[5070]: I1213 04:36:52.587693 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5bff6e2c-8357-4b44-854d-8ff82897ee24-utilities\") pod \"community-operators-lq74q\" (UID: \"5bff6e2c-8357-4b44-854d-8ff82897ee24\") " pod="openshift-marketplace/community-operators-lq74q" Dec 13 04:36:52 crc kubenswrapper[5070]: I1213 04:36:52.587814 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z9p4c\" (UniqueName: \"kubernetes.io/projected/5bff6e2c-8357-4b44-854d-8ff82897ee24-kube-api-access-z9p4c\") pod \"community-operators-lq74q\" (UID: \"5bff6e2c-8357-4b44-854d-8ff82897ee24\") " pod="openshift-marketplace/community-operators-lq74q" Dec 13 04:36:52 crc kubenswrapper[5070]: I1213 04:36:52.587879 5070 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/400a47b5-51be-4f86-a6a0-360d6b05817c-host\") on node \"crc\" DevicePath \"\"" Dec 13 04:36:52 crc kubenswrapper[5070]: I1213 04:36:52.588111 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5bff6e2c-8357-4b44-854d-8ff82897ee24-catalog-content\") pod \"community-operators-lq74q\" (UID: \"5bff6e2c-8357-4b44-854d-8ff82897ee24\") " pod="openshift-marketplace/community-operators-lq74q" Dec 13 04:36:52 crc kubenswrapper[5070]: I1213 04:36:52.588423 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5bff6e2c-8357-4b44-854d-8ff82897ee24-utilities\") pod \"community-operators-lq74q\" (UID: \"5bff6e2c-8357-4b44-854d-8ff82897ee24\") " pod="openshift-marketplace/community-operators-lq74q" Dec 13 04:36:52 crc kubenswrapper[5070]: I1213 04:36:52.599337 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/projected/400a47b5-51be-4f86-a6a0-360d6b05817c-kube-api-access-zpf66" (OuterVolumeSpecName: "kube-api-access-zpf66") pod "400a47b5-51be-4f86-a6a0-360d6b05817c" (UID: "400a47b5-51be-4f86-a6a0-360d6b05817c"). InnerVolumeSpecName "kube-api-access-zpf66". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 04:36:52 crc kubenswrapper[5070]: I1213 04:36:52.613210 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z9p4c\" (UniqueName: \"kubernetes.io/projected/5bff6e2c-8357-4b44-854d-8ff82897ee24-kube-api-access-z9p4c\") pod \"community-operators-lq74q\" (UID: \"5bff6e2c-8357-4b44-854d-8ff82897ee24\") " pod="openshift-marketplace/community-operators-lq74q" Dec 13 04:36:52 crc kubenswrapper[5070]: I1213 04:36:52.696771 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zpf66\" (UniqueName: \"kubernetes.io/projected/400a47b5-51be-4f86-a6a0-360d6b05817c-kube-api-access-zpf66\") on node \"crc\" DevicePath \"\"" Dec 13 04:36:52 crc kubenswrapper[5070]: I1213 04:36:52.783901 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-lq74q" Dec 13 04:36:53 crc kubenswrapper[5070]: I1213 04:36:53.283299 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-lq74q"] Dec 13 04:36:53 crc kubenswrapper[5070]: W1213 04:36:53.284966 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5bff6e2c_8357_4b44_854d_8ff82897ee24.slice/crio-6bbfe8745fc04b217758859c1ae72dc2faf546b291aa69ec1f1bc54351a115e5 WatchSource:0}: Error finding container 6bbfe8745fc04b217758859c1ae72dc2faf546b291aa69ec1f1bc54351a115e5: Status 404 returned error can't find the container with id 6bbfe8745fc04b217758859c1ae72dc2faf546b291aa69ec1f1bc54351a115e5 Dec 13 04:36:53 crc kubenswrapper[5070]: I1213 04:36:53.384502 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lq74q" event={"ID":"5bff6e2c-8357-4b44-854d-8ff82897ee24","Type":"ContainerStarted","Data":"6bbfe8745fc04b217758859c1ae72dc2faf546b291aa69ec1f1bc54351a115e5"} Dec 13 04:36:53 crc kubenswrapper[5070]: I1213 04:36:53.387254 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-vqfsl/crc-debug-n8lwq" Dec 13 04:36:53 crc kubenswrapper[5070]: I1213 04:36:53.387272 5070 scope.go:117] "RemoveContainer" containerID="2f4927d7e5d05b3bfaeb38336c580632bffc072912d9dc7fee6dabc1015af8da" Dec 13 04:36:53 crc kubenswrapper[5070]: I1213 04:36:53.391516 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" event={"ID":"a2e447c7-5901-414f-af96-69441d4750db","Type":"ContainerStarted","Data":"4dd2d3fc2cd1ced75e69bd95c0ae138ae20b733a2f44ca40234bada3b2e3e2d0"} Dec 13 04:36:54 crc kubenswrapper[5070]: I1213 04:36:54.188168 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="400a47b5-51be-4f86-a6a0-360d6b05817c" path="/var/lib/kubelet/pods/400a47b5-51be-4f86-a6a0-360d6b05817c/volumes" Dec 13 04:36:55 crc kubenswrapper[5070]: I1213 04:36:55.448929 5070 generic.go:334] "Generic (PLEG): container finished" podID="5bff6e2c-8357-4b44-854d-8ff82897ee24" containerID="28693926ce5c4151103fb70eed2af6a5219bfc2deb1bd2258b8764c3be93900a" exitCode=0 Dec 13 04:36:55 crc kubenswrapper[5070]: I1213 04:36:55.448988 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lq74q" event={"ID":"5bff6e2c-8357-4b44-854d-8ff82897ee24","Type":"ContainerDied","Data":"28693926ce5c4151103fb70eed2af6a5219bfc2deb1bd2258b8764c3be93900a"} Dec 13 04:36:57 crc kubenswrapper[5070]: I1213 04:36:57.468775 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lq74q" event={"ID":"5bff6e2c-8357-4b44-854d-8ff82897ee24","Type":"ContainerStarted","Data":"401f96ce532cc6d89562403404ca01a0a1da4318ed2f44936f9370364461e03e"} Dec 13 04:36:58 crc kubenswrapper[5070]: I1213 04:36:58.490413 5070 generic.go:334] "Generic (PLEG): container finished" podID="5bff6e2c-8357-4b44-854d-8ff82897ee24" containerID="401f96ce532cc6d89562403404ca01a0a1da4318ed2f44936f9370364461e03e" exitCode=0 Dec 13 04:36:58 crc kubenswrapper[5070]: I1213 04:36:58.490522 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lq74q" event={"ID":"5bff6e2c-8357-4b44-854d-8ff82897ee24","Type":"ContainerDied","Data":"401f96ce532cc6d89562403404ca01a0a1da4318ed2f44936f9370364461e03e"} Dec 13 04:36:58 crc kubenswrapper[5070]: E1213 04:36:58.862926 5070 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod058a2d61_481d_4071_ae35_dcb193c2126d.slice\": RecentStats: unable to find data in memory cache]" Dec 13 04:36:59 crc kubenswrapper[5070]: I1213 04:36:59.501813 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lq74q" event={"ID":"5bff6e2c-8357-4b44-854d-8ff82897ee24","Type":"ContainerStarted","Data":"7ba2609a6084e728130f002f7283de8f42095745ff7a9f0c9137b7bae5fdc9e0"} Dec 13 04:36:59 crc kubenswrapper[5070]: I1213 04:36:59.528024 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-lq74q" podStartSLOduration=4.040919672 podStartE2EDuration="7.528003697s" podCreationTimestamp="2025-12-13 04:36:52 +0000 UTC" firstStartedPulling="2025-12-13 04:36:55.451410967 +0000 UTC m=+5107.687254513" lastFinishedPulling="2025-12-13 04:36:58.938494972 +0000 UTC m=+5111.174338538" observedRunningTime="2025-12-13 04:36:59.521914371 +0000 UTC m=+5111.757757917" 
watchObservedRunningTime="2025-12-13 04:36:59.528003697 +0000 UTC m=+5111.763847253" Dec 13 04:37:02 crc kubenswrapper[5070]: I1213 04:37:02.784998 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-lq74q" Dec 13 04:37:02 crc kubenswrapper[5070]: I1213 04:37:02.785947 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-lq74q" Dec 13 04:37:02 crc kubenswrapper[5070]: I1213 04:37:02.876092 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-lq74q" Dec 13 04:37:07 crc kubenswrapper[5070]: I1213 04:37:07.589283 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-n8f92"] Dec 13 04:37:07 crc kubenswrapper[5070]: I1213 04:37:07.593956 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-n8f92" Dec 13 04:37:07 crc kubenswrapper[5070]: I1213 04:37:07.606788 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-n8f92"] Dec 13 04:37:07 crc kubenswrapper[5070]: I1213 04:37:07.753746 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lrmdk\" (UniqueName: \"kubernetes.io/projected/fca679a6-3edb-4cfa-a9bb-de299bc26562-kube-api-access-lrmdk\") pod \"redhat-marketplace-n8f92\" (UID: \"fca679a6-3edb-4cfa-a9bb-de299bc26562\") " pod="openshift-marketplace/redhat-marketplace-n8f92" Dec 13 04:37:07 crc kubenswrapper[5070]: I1213 04:37:07.754555 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fca679a6-3edb-4cfa-a9bb-de299bc26562-catalog-content\") pod \"redhat-marketplace-n8f92\" (UID: \"fca679a6-3edb-4cfa-a9bb-de299bc26562\") " pod="openshift-marketplace/redhat-marketplace-n8f92" Dec 13 04:37:07 crc kubenswrapper[5070]: I1213 04:37:07.754625 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fca679a6-3edb-4cfa-a9bb-de299bc26562-utilities\") pod \"redhat-marketplace-n8f92\" (UID: \"fca679a6-3edb-4cfa-a9bb-de299bc26562\") " pod="openshift-marketplace/redhat-marketplace-n8f92" Dec 13 04:37:07 crc kubenswrapper[5070]: I1213 04:37:07.857287 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lrmdk\" (UniqueName: \"kubernetes.io/projected/fca679a6-3edb-4cfa-a9bb-de299bc26562-kube-api-access-lrmdk\") pod \"redhat-marketplace-n8f92\" (UID: \"fca679a6-3edb-4cfa-a9bb-de299bc26562\") " pod="openshift-marketplace/redhat-marketplace-n8f92" Dec 13 04:37:07 crc kubenswrapper[5070]: I1213 04:37:07.857395 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fca679a6-3edb-4cfa-a9bb-de299bc26562-catalog-content\") pod \"redhat-marketplace-n8f92\" (UID: \"fca679a6-3edb-4cfa-a9bb-de299bc26562\") " pod="openshift-marketplace/redhat-marketplace-n8f92" Dec 13 04:37:07 crc kubenswrapper[5070]: I1213 04:37:07.857525 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fca679a6-3edb-4cfa-a9bb-de299bc26562-utilities\") pod \"redhat-marketplace-n8f92\" (UID: 
\"fca679a6-3edb-4cfa-a9bb-de299bc26562\") " pod="openshift-marketplace/redhat-marketplace-n8f92" Dec 13 04:37:07 crc kubenswrapper[5070]: I1213 04:37:07.858527 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fca679a6-3edb-4cfa-a9bb-de299bc26562-catalog-content\") pod \"redhat-marketplace-n8f92\" (UID: \"fca679a6-3edb-4cfa-a9bb-de299bc26562\") " pod="openshift-marketplace/redhat-marketplace-n8f92" Dec 13 04:37:07 crc kubenswrapper[5070]: I1213 04:37:07.858739 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fca679a6-3edb-4cfa-a9bb-de299bc26562-utilities\") pod \"redhat-marketplace-n8f92\" (UID: \"fca679a6-3edb-4cfa-a9bb-de299bc26562\") " pod="openshift-marketplace/redhat-marketplace-n8f92" Dec 13 04:37:08 crc kubenswrapper[5070]: I1213 04:37:08.083199 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lrmdk\" (UniqueName: \"kubernetes.io/projected/fca679a6-3edb-4cfa-a9bb-de299bc26562-kube-api-access-lrmdk\") pod \"redhat-marketplace-n8f92\" (UID: \"fca679a6-3edb-4cfa-a9bb-de299bc26562\") " pod="openshift-marketplace/redhat-marketplace-n8f92" Dec 13 04:37:08 crc kubenswrapper[5070]: I1213 04:37:08.215549 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-n8f92" Dec 13 04:37:08 crc kubenswrapper[5070]: I1213 04:37:08.666399 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-n8f92"] Dec 13 04:37:08 crc kubenswrapper[5070]: W1213 04:37:08.684866 5070 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfca679a6_3edb_4cfa_a9bb_de299bc26562.slice/crio-0ce374a2a57e1861a350505817541c3dd346664dd926dd17223f8efa7584cdd1 WatchSource:0}: Error finding container 0ce374a2a57e1861a350505817541c3dd346664dd926dd17223f8efa7584cdd1: Status 404 returned error can't find the container with id 0ce374a2a57e1861a350505817541c3dd346664dd926dd17223f8efa7584cdd1 Dec 13 04:37:09 crc kubenswrapper[5070]: E1213 04:37:09.115997 5070 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod058a2d61_481d_4071_ae35_dcb193c2126d.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfca679a6_3edb_4cfa_a9bb_de299bc26562.slice/crio-aa8537f6b019c0f767f6b0963a1a53e79c9bcb0d6700d1cf21b99b4bb020390c.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfca679a6_3edb_4cfa_a9bb_de299bc26562.slice/crio-conmon-aa8537f6b019c0f767f6b0963a1a53e79c9bcb0d6700d1cf21b99b4bb020390c.scope\": RecentStats: unable to find data in memory cache]" Dec 13 04:37:09 crc kubenswrapper[5070]: I1213 04:37:09.618118 5070 generic.go:334] "Generic (PLEG): container finished" podID="fca679a6-3edb-4cfa-a9bb-de299bc26562" containerID="aa8537f6b019c0f767f6b0963a1a53e79c9bcb0d6700d1cf21b99b4bb020390c" exitCode=0 Dec 13 04:37:09 crc kubenswrapper[5070]: I1213 04:37:09.618257 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-n8f92" 
event={"ID":"fca679a6-3edb-4cfa-a9bb-de299bc26562","Type":"ContainerDied","Data":"aa8537f6b019c0f767f6b0963a1a53e79c9bcb0d6700d1cf21b99b4bb020390c"} Dec 13 04:37:09 crc kubenswrapper[5070]: I1213 04:37:09.618552 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-n8f92" event={"ID":"fca679a6-3edb-4cfa-a9bb-de299bc26562","Type":"ContainerStarted","Data":"0ce374a2a57e1861a350505817541c3dd346664dd926dd17223f8efa7584cdd1"} Dec 13 04:37:11 crc kubenswrapper[5070]: I1213 04:37:11.639093 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-n8f92" event={"ID":"fca679a6-3edb-4cfa-a9bb-de299bc26562","Type":"ContainerStarted","Data":"39d45b941a281ea55c0d1d95e430f99da86f41b027405c0f20bd05b72149eaa8"} Dec 13 04:37:12 crc kubenswrapper[5070]: I1213 04:37:12.650387 5070 generic.go:334] "Generic (PLEG): container finished" podID="fca679a6-3edb-4cfa-a9bb-de299bc26562" containerID="39d45b941a281ea55c0d1d95e430f99da86f41b027405c0f20bd05b72149eaa8" exitCode=0 Dec 13 04:37:12 crc kubenswrapper[5070]: I1213 04:37:12.650473 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-n8f92" event={"ID":"fca679a6-3edb-4cfa-a9bb-de299bc26562","Type":"ContainerDied","Data":"39d45b941a281ea55c0d1d95e430f99da86f41b027405c0f20bd05b72149eaa8"} Dec 13 04:37:12 crc kubenswrapper[5070]: I1213 04:37:12.843271 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-lq74q" Dec 13 04:37:13 crc kubenswrapper[5070]: I1213 04:37:13.662464 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-n8f92" event={"ID":"fca679a6-3edb-4cfa-a9bb-de299bc26562","Type":"ContainerStarted","Data":"ae74d861453fd80ecf97700778911555143d53c3b0f507c3293f38abf615455b"} Dec 13 04:37:13 crc kubenswrapper[5070]: I1213 04:37:13.691581 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-n8f92" podStartSLOduration=3.215004154 podStartE2EDuration="6.691558013s" podCreationTimestamp="2025-12-13 04:37:07 +0000 UTC" firstStartedPulling="2025-12-13 04:37:09.622282232 +0000 UTC m=+5121.858125808" lastFinishedPulling="2025-12-13 04:37:13.098836131 +0000 UTC m=+5125.334679667" observedRunningTime="2025-12-13 04:37:13.681250054 +0000 UTC m=+5125.917093600" watchObservedRunningTime="2025-12-13 04:37:13.691558013 +0000 UTC m=+5125.927401569" Dec 13 04:37:14 crc kubenswrapper[5070]: I1213 04:37:14.179369 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-lq74q"] Dec 13 04:37:14 crc kubenswrapper[5070]: I1213 04:37:14.179685 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-lq74q" podUID="5bff6e2c-8357-4b44-854d-8ff82897ee24" containerName="registry-server" containerID="cri-o://7ba2609a6084e728130f002f7283de8f42095745ff7a9f0c9137b7bae5fdc9e0" gracePeriod=2 Dec 13 04:37:14 crc kubenswrapper[5070]: I1213 04:37:14.669215 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-lq74q" Dec 13 04:37:14 crc kubenswrapper[5070]: I1213 04:37:14.679947 5070 generic.go:334] "Generic (PLEG): container finished" podID="5bff6e2c-8357-4b44-854d-8ff82897ee24" containerID="7ba2609a6084e728130f002f7283de8f42095745ff7a9f0c9137b7bae5fdc9e0" exitCode=0 Dec 13 04:37:14 crc kubenswrapper[5070]: I1213 04:37:14.680015 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lq74q" event={"ID":"5bff6e2c-8357-4b44-854d-8ff82897ee24","Type":"ContainerDied","Data":"7ba2609a6084e728130f002f7283de8f42095745ff7a9f0c9137b7bae5fdc9e0"} Dec 13 04:37:14 crc kubenswrapper[5070]: I1213 04:37:14.680085 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lq74q" event={"ID":"5bff6e2c-8357-4b44-854d-8ff82897ee24","Type":"ContainerDied","Data":"6bbfe8745fc04b217758859c1ae72dc2faf546b291aa69ec1f1bc54351a115e5"} Dec 13 04:37:14 crc kubenswrapper[5070]: I1213 04:37:14.680108 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-lq74q" Dec 13 04:37:14 crc kubenswrapper[5070]: I1213 04:37:14.680110 5070 scope.go:117] "RemoveContainer" containerID="7ba2609a6084e728130f002f7283de8f42095745ff7a9f0c9137b7bae5fdc9e0" Dec 13 04:37:14 crc kubenswrapper[5070]: I1213 04:37:14.703998 5070 scope.go:117] "RemoveContainer" containerID="401f96ce532cc6d89562403404ca01a0a1da4318ed2f44936f9370364461e03e" Dec 13 04:37:14 crc kubenswrapper[5070]: I1213 04:37:14.735622 5070 scope.go:117] "RemoveContainer" containerID="28693926ce5c4151103fb70eed2af6a5219bfc2deb1bd2258b8764c3be93900a" Dec 13 04:37:14 crc kubenswrapper[5070]: I1213 04:37:14.780489 5070 scope.go:117] "RemoveContainer" containerID="7ba2609a6084e728130f002f7283de8f42095745ff7a9f0c9137b7bae5fdc9e0" Dec 13 04:37:14 crc kubenswrapper[5070]: E1213 04:37:14.781354 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7ba2609a6084e728130f002f7283de8f42095745ff7a9f0c9137b7bae5fdc9e0\": container with ID starting with 7ba2609a6084e728130f002f7283de8f42095745ff7a9f0c9137b7bae5fdc9e0 not found: ID does not exist" containerID="7ba2609a6084e728130f002f7283de8f42095745ff7a9f0c9137b7bae5fdc9e0" Dec 13 04:37:14 crc kubenswrapper[5070]: I1213 04:37:14.781400 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7ba2609a6084e728130f002f7283de8f42095745ff7a9f0c9137b7bae5fdc9e0"} err="failed to get container status \"7ba2609a6084e728130f002f7283de8f42095745ff7a9f0c9137b7bae5fdc9e0\": rpc error: code = NotFound desc = could not find container \"7ba2609a6084e728130f002f7283de8f42095745ff7a9f0c9137b7bae5fdc9e0\": container with ID starting with 7ba2609a6084e728130f002f7283de8f42095745ff7a9f0c9137b7bae5fdc9e0 not found: ID does not exist" Dec 13 04:37:14 crc kubenswrapper[5070]: I1213 04:37:14.781425 5070 scope.go:117] "RemoveContainer" containerID="401f96ce532cc6d89562403404ca01a0a1da4318ed2f44936f9370364461e03e" Dec 13 04:37:14 crc kubenswrapper[5070]: E1213 04:37:14.781809 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"401f96ce532cc6d89562403404ca01a0a1da4318ed2f44936f9370364461e03e\": container with ID starting with 401f96ce532cc6d89562403404ca01a0a1da4318ed2f44936f9370364461e03e not found: ID does not exist" 
containerID="401f96ce532cc6d89562403404ca01a0a1da4318ed2f44936f9370364461e03e" Dec 13 04:37:14 crc kubenswrapper[5070]: I1213 04:37:14.781848 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"401f96ce532cc6d89562403404ca01a0a1da4318ed2f44936f9370364461e03e"} err="failed to get container status \"401f96ce532cc6d89562403404ca01a0a1da4318ed2f44936f9370364461e03e\": rpc error: code = NotFound desc = could not find container \"401f96ce532cc6d89562403404ca01a0a1da4318ed2f44936f9370364461e03e\": container with ID starting with 401f96ce532cc6d89562403404ca01a0a1da4318ed2f44936f9370364461e03e not found: ID does not exist" Dec 13 04:37:14 crc kubenswrapper[5070]: I1213 04:37:14.781874 5070 scope.go:117] "RemoveContainer" containerID="28693926ce5c4151103fb70eed2af6a5219bfc2deb1bd2258b8764c3be93900a" Dec 13 04:37:14 crc kubenswrapper[5070]: E1213 04:37:14.782132 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"28693926ce5c4151103fb70eed2af6a5219bfc2deb1bd2258b8764c3be93900a\": container with ID starting with 28693926ce5c4151103fb70eed2af6a5219bfc2deb1bd2258b8764c3be93900a not found: ID does not exist" containerID="28693926ce5c4151103fb70eed2af6a5219bfc2deb1bd2258b8764c3be93900a" Dec 13 04:37:14 crc kubenswrapper[5070]: I1213 04:37:14.782161 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"28693926ce5c4151103fb70eed2af6a5219bfc2deb1bd2258b8764c3be93900a"} err="failed to get container status \"28693926ce5c4151103fb70eed2af6a5219bfc2deb1bd2258b8764c3be93900a\": rpc error: code = NotFound desc = could not find container \"28693926ce5c4151103fb70eed2af6a5219bfc2deb1bd2258b8764c3be93900a\": container with ID starting with 28693926ce5c4151103fb70eed2af6a5219bfc2deb1bd2258b8764c3be93900a not found: ID does not exist" Dec 13 04:37:14 crc kubenswrapper[5070]: I1213 04:37:14.791813 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z9p4c\" (UniqueName: \"kubernetes.io/projected/5bff6e2c-8357-4b44-854d-8ff82897ee24-kube-api-access-z9p4c\") pod \"5bff6e2c-8357-4b44-854d-8ff82897ee24\" (UID: \"5bff6e2c-8357-4b44-854d-8ff82897ee24\") " Dec 13 04:37:14 crc kubenswrapper[5070]: I1213 04:37:14.792179 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5bff6e2c-8357-4b44-854d-8ff82897ee24-catalog-content\") pod \"5bff6e2c-8357-4b44-854d-8ff82897ee24\" (UID: \"5bff6e2c-8357-4b44-854d-8ff82897ee24\") " Dec 13 04:37:14 crc kubenswrapper[5070]: I1213 04:37:14.792358 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5bff6e2c-8357-4b44-854d-8ff82897ee24-utilities\") pod \"5bff6e2c-8357-4b44-854d-8ff82897ee24\" (UID: \"5bff6e2c-8357-4b44-854d-8ff82897ee24\") " Dec 13 04:37:14 crc kubenswrapper[5070]: I1213 04:37:14.793206 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5bff6e2c-8357-4b44-854d-8ff82897ee24-utilities" (OuterVolumeSpecName: "utilities") pod "5bff6e2c-8357-4b44-854d-8ff82897ee24" (UID: "5bff6e2c-8357-4b44-854d-8ff82897ee24"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 04:37:14 crc kubenswrapper[5070]: I1213 04:37:14.797343 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5bff6e2c-8357-4b44-854d-8ff82897ee24-kube-api-access-z9p4c" (OuterVolumeSpecName: "kube-api-access-z9p4c") pod "5bff6e2c-8357-4b44-854d-8ff82897ee24" (UID: "5bff6e2c-8357-4b44-854d-8ff82897ee24"). InnerVolumeSpecName "kube-api-access-z9p4c". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 04:37:14 crc kubenswrapper[5070]: I1213 04:37:14.843256 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5bff6e2c-8357-4b44-854d-8ff82897ee24-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5bff6e2c-8357-4b44-854d-8ff82897ee24" (UID: "5bff6e2c-8357-4b44-854d-8ff82897ee24"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 04:37:14 crc kubenswrapper[5070]: I1213 04:37:14.895354 5070 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5bff6e2c-8357-4b44-854d-8ff82897ee24-utilities\") on node \"crc\" DevicePath \"\"" Dec 13 04:37:14 crc kubenswrapper[5070]: I1213 04:37:14.895387 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z9p4c\" (UniqueName: \"kubernetes.io/projected/5bff6e2c-8357-4b44-854d-8ff82897ee24-kube-api-access-z9p4c\") on node \"crc\" DevicePath \"\"" Dec 13 04:37:14 crc kubenswrapper[5070]: I1213 04:37:14.895399 5070 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5bff6e2c-8357-4b44-854d-8ff82897ee24-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 13 04:37:15 crc kubenswrapper[5070]: I1213 04:37:15.014000 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-lq74q"] Dec 13 04:37:15 crc kubenswrapper[5070]: I1213 04:37:15.028929 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-lq74q"] Dec 13 04:37:16 crc kubenswrapper[5070]: I1213 04:37:16.182705 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5bff6e2c-8357-4b44-854d-8ff82897ee24" path="/var/lib/kubelet/pods/5bff6e2c-8357-4b44-854d-8ff82897ee24/volumes" Dec 13 04:37:18 crc kubenswrapper[5070]: I1213 04:37:18.216302 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-n8f92" Dec 13 04:37:18 crc kubenswrapper[5070]: I1213 04:37:18.216780 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-n8f92" Dec 13 04:37:18 crc kubenswrapper[5070]: I1213 04:37:18.639368 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-n8f92" Dec 13 04:37:18 crc kubenswrapper[5070]: I1213 04:37:18.791378 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-n8f92" Dec 13 04:37:19 crc kubenswrapper[5070]: I1213 04:37:19.182423 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-n8f92"] Dec 13 04:37:19 crc kubenswrapper[5070]: E1213 04:37:19.343054 5070 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: 
[\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod058a2d61_481d_4071_ae35_dcb193c2126d.slice\": RecentStats: unable to find data in memory cache]" Dec 13 04:37:20 crc kubenswrapper[5070]: I1213 04:37:20.760251 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-n8f92" podUID="fca679a6-3edb-4cfa-a9bb-de299bc26562" containerName="registry-server" containerID="cri-o://ae74d861453fd80ecf97700778911555143d53c3b0f507c3293f38abf615455b" gracePeriod=2 Dec 13 04:37:21 crc kubenswrapper[5070]: I1213 04:37:21.379261 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-n8f92" Dec 13 04:37:21 crc kubenswrapper[5070]: I1213 04:37:21.543163 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fca679a6-3edb-4cfa-a9bb-de299bc26562-catalog-content\") pod \"fca679a6-3edb-4cfa-a9bb-de299bc26562\" (UID: \"fca679a6-3edb-4cfa-a9bb-de299bc26562\") " Dec 13 04:37:21 crc kubenswrapper[5070]: I1213 04:37:21.543293 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lrmdk\" (UniqueName: \"kubernetes.io/projected/fca679a6-3edb-4cfa-a9bb-de299bc26562-kube-api-access-lrmdk\") pod \"fca679a6-3edb-4cfa-a9bb-de299bc26562\" (UID: \"fca679a6-3edb-4cfa-a9bb-de299bc26562\") " Dec 13 04:37:21 crc kubenswrapper[5070]: I1213 04:37:21.543573 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fca679a6-3edb-4cfa-a9bb-de299bc26562-utilities\") pod \"fca679a6-3edb-4cfa-a9bb-de299bc26562\" (UID: \"fca679a6-3edb-4cfa-a9bb-de299bc26562\") " Dec 13 04:37:21 crc kubenswrapper[5070]: I1213 04:37:21.544269 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fca679a6-3edb-4cfa-a9bb-de299bc26562-utilities" (OuterVolumeSpecName: "utilities") pod "fca679a6-3edb-4cfa-a9bb-de299bc26562" (UID: "fca679a6-3edb-4cfa-a9bb-de299bc26562"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 04:37:21 crc kubenswrapper[5070]: I1213 04:37:21.553316 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fca679a6-3edb-4cfa-a9bb-de299bc26562-kube-api-access-lrmdk" (OuterVolumeSpecName: "kube-api-access-lrmdk") pod "fca679a6-3edb-4cfa-a9bb-de299bc26562" (UID: "fca679a6-3edb-4cfa-a9bb-de299bc26562"). InnerVolumeSpecName "kube-api-access-lrmdk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 04:37:21 crc kubenswrapper[5070]: I1213 04:37:21.569132 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fca679a6-3edb-4cfa-a9bb-de299bc26562-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "fca679a6-3edb-4cfa-a9bb-de299bc26562" (UID: "fca679a6-3edb-4cfa-a9bb-de299bc26562"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 04:37:21 crc kubenswrapper[5070]: I1213 04:37:21.646329 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lrmdk\" (UniqueName: \"kubernetes.io/projected/fca679a6-3edb-4cfa-a9bb-de299bc26562-kube-api-access-lrmdk\") on node \"crc\" DevicePath \"\"" Dec 13 04:37:21 crc kubenswrapper[5070]: I1213 04:37:21.646686 5070 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fca679a6-3edb-4cfa-a9bb-de299bc26562-utilities\") on node \"crc\" DevicePath \"\"" Dec 13 04:37:21 crc kubenswrapper[5070]: I1213 04:37:21.646699 5070 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fca679a6-3edb-4cfa-a9bb-de299bc26562-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 13 04:37:21 crc kubenswrapper[5070]: I1213 04:37:21.769956 5070 generic.go:334] "Generic (PLEG): container finished" podID="fca679a6-3edb-4cfa-a9bb-de299bc26562" containerID="ae74d861453fd80ecf97700778911555143d53c3b0f507c3293f38abf615455b" exitCode=0 Dec 13 04:37:21 crc kubenswrapper[5070]: I1213 04:37:21.770008 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-n8f92" Dec 13 04:37:21 crc kubenswrapper[5070]: I1213 04:37:21.770021 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-n8f92" event={"ID":"fca679a6-3edb-4cfa-a9bb-de299bc26562","Type":"ContainerDied","Data":"ae74d861453fd80ecf97700778911555143d53c3b0f507c3293f38abf615455b"} Dec 13 04:37:21 crc kubenswrapper[5070]: I1213 04:37:21.770059 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-n8f92" event={"ID":"fca679a6-3edb-4cfa-a9bb-de299bc26562","Type":"ContainerDied","Data":"0ce374a2a57e1861a350505817541c3dd346664dd926dd17223f8efa7584cdd1"} Dec 13 04:37:21 crc kubenswrapper[5070]: I1213 04:37:21.770078 5070 scope.go:117] "RemoveContainer" containerID="ae74d861453fd80ecf97700778911555143d53c3b0f507c3293f38abf615455b" Dec 13 04:37:21 crc kubenswrapper[5070]: I1213 04:37:21.794206 5070 scope.go:117] "RemoveContainer" containerID="39d45b941a281ea55c0d1d95e430f99da86f41b027405c0f20bd05b72149eaa8" Dec 13 04:37:21 crc kubenswrapper[5070]: I1213 04:37:21.812170 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-n8f92"] Dec 13 04:37:21 crc kubenswrapper[5070]: I1213 04:37:21.822292 5070 scope.go:117] "RemoveContainer" containerID="aa8537f6b019c0f767f6b0963a1a53e79c9bcb0d6700d1cf21b99b4bb020390c" Dec 13 04:37:21 crc kubenswrapper[5070]: I1213 04:37:21.824755 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-n8f92"] Dec 13 04:37:21 crc kubenswrapper[5070]: I1213 04:37:21.863064 5070 scope.go:117] "RemoveContainer" containerID="ae74d861453fd80ecf97700778911555143d53c3b0f507c3293f38abf615455b" Dec 13 04:37:21 crc kubenswrapper[5070]: E1213 04:37:21.863676 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ae74d861453fd80ecf97700778911555143d53c3b0f507c3293f38abf615455b\": container with ID starting with ae74d861453fd80ecf97700778911555143d53c3b0f507c3293f38abf615455b not found: ID does not exist" containerID="ae74d861453fd80ecf97700778911555143d53c3b0f507c3293f38abf615455b" Dec 13 04:37:21 crc kubenswrapper[5070]: I1213 04:37:21.863720 5070 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ae74d861453fd80ecf97700778911555143d53c3b0f507c3293f38abf615455b"} err="failed to get container status \"ae74d861453fd80ecf97700778911555143d53c3b0f507c3293f38abf615455b\": rpc error: code = NotFound desc = could not find container \"ae74d861453fd80ecf97700778911555143d53c3b0f507c3293f38abf615455b\": container with ID starting with ae74d861453fd80ecf97700778911555143d53c3b0f507c3293f38abf615455b not found: ID does not exist" Dec 13 04:37:21 crc kubenswrapper[5070]: I1213 04:37:21.863745 5070 scope.go:117] "RemoveContainer" containerID="39d45b941a281ea55c0d1d95e430f99da86f41b027405c0f20bd05b72149eaa8" Dec 13 04:37:21 crc kubenswrapper[5070]: E1213 04:37:21.864339 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"39d45b941a281ea55c0d1d95e430f99da86f41b027405c0f20bd05b72149eaa8\": container with ID starting with 39d45b941a281ea55c0d1d95e430f99da86f41b027405c0f20bd05b72149eaa8 not found: ID does not exist" containerID="39d45b941a281ea55c0d1d95e430f99da86f41b027405c0f20bd05b72149eaa8" Dec 13 04:37:21 crc kubenswrapper[5070]: I1213 04:37:21.864394 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"39d45b941a281ea55c0d1d95e430f99da86f41b027405c0f20bd05b72149eaa8"} err="failed to get container status \"39d45b941a281ea55c0d1d95e430f99da86f41b027405c0f20bd05b72149eaa8\": rpc error: code = NotFound desc = could not find container \"39d45b941a281ea55c0d1d95e430f99da86f41b027405c0f20bd05b72149eaa8\": container with ID starting with 39d45b941a281ea55c0d1d95e430f99da86f41b027405c0f20bd05b72149eaa8 not found: ID does not exist" Dec 13 04:37:21 crc kubenswrapper[5070]: I1213 04:37:21.864423 5070 scope.go:117] "RemoveContainer" containerID="aa8537f6b019c0f767f6b0963a1a53e79c9bcb0d6700d1cf21b99b4bb020390c" Dec 13 04:37:21 crc kubenswrapper[5070]: E1213 04:37:21.864822 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aa8537f6b019c0f767f6b0963a1a53e79c9bcb0d6700d1cf21b99b4bb020390c\": container with ID starting with aa8537f6b019c0f767f6b0963a1a53e79c9bcb0d6700d1cf21b99b4bb020390c not found: ID does not exist" containerID="aa8537f6b019c0f767f6b0963a1a53e79c9bcb0d6700d1cf21b99b4bb020390c" Dec 13 04:37:21 crc kubenswrapper[5070]: I1213 04:37:21.864853 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aa8537f6b019c0f767f6b0963a1a53e79c9bcb0d6700d1cf21b99b4bb020390c"} err="failed to get container status \"aa8537f6b019c0f767f6b0963a1a53e79c9bcb0d6700d1cf21b99b4bb020390c\": rpc error: code = NotFound desc = could not find container \"aa8537f6b019c0f767f6b0963a1a53e79c9bcb0d6700d1cf21b99b4bb020390c\": container with ID starting with aa8537f6b019c0f767f6b0963a1a53e79c9bcb0d6700d1cf21b99b4bb020390c not found: ID does not exist" Dec 13 04:37:22 crc kubenswrapper[5070]: I1213 04:37:22.177827 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fca679a6-3edb-4cfa-a9bb-de299bc26562" path="/var/lib/kubelet/pods/fca679a6-3edb-4cfa-a9bb-de299bc26562/volumes" Dec 13 04:37:22 crc kubenswrapper[5070]: I1213 04:37:22.681308 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-7d678df9bd-j8bfh_c23e095e-de7a-419f-b6d4-1ca536a3069b/barbican-api/0.log" Dec 13 04:37:22 crc kubenswrapper[5070]: I1213 04:37:22.875774 5070 log.go:25] 
"Finished parsing log file" path="/var/log/pods/openstack_barbican-api-7d678df9bd-j8bfh_c23e095e-de7a-419f-b6d4-1ca536a3069b/barbican-api-log/0.log" Dec 13 04:37:22 crc kubenswrapper[5070]: I1213 04:37:22.935830 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-8674755ffd-cl8xp_fd4d7f22-4b6c-4dd8-ad9d-ba43bd6d666d/barbican-keystone-listener/0.log" Dec 13 04:37:23 crc kubenswrapper[5070]: I1213 04:37:23.299537 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-6ff985df9f-bj7dz_e8acefb2-398a-4cad-aaf8-7a72714b0ac9/barbican-worker/0.log" Dec 13 04:37:23 crc kubenswrapper[5070]: I1213 04:37:23.327944 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-6ff985df9f-bj7dz_e8acefb2-398a-4cad-aaf8-7a72714b0ac9/barbican-worker-log/0.log" Dec 13 04:37:23 crc kubenswrapper[5070]: I1213 04:37:23.338501 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-8674755ffd-cl8xp_fd4d7f22-4b6c-4dd8-ad9d-ba43bd6d666d/barbican-keystone-listener-log/0.log" Dec 13 04:37:23 crc kubenswrapper[5070]: I1213 04:37:23.510589 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-edpm-deployment-openstack-edpm-ipam-cmh89_7859b77b-28d9-4fb3-83c6-d38cda4ecba5/bootstrap-edpm-deployment-openstack-edpm-ipam/0.log" Dec 13 04:37:23 crc kubenswrapper[5070]: I1213 04:37:23.550755 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_ecb1773c-197e-4e3a-9c38-9106ae722cbe/ceilometer-central-agent/0.log" Dec 13 04:37:23 crc kubenswrapper[5070]: I1213 04:37:23.685101 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_ecb1773c-197e-4e3a-9c38-9106ae722cbe/ceilometer-notification-agent/0.log" Dec 13 04:37:23 crc kubenswrapper[5070]: I1213 04:37:23.739881 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_ecb1773c-197e-4e3a-9c38-9106ae722cbe/proxy-httpd/0.log" Dec 13 04:37:23 crc kubenswrapper[5070]: I1213 04:37:23.796026 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_ecb1773c-197e-4e3a-9c38-9106ae722cbe/sg-core/0.log" Dec 13 04:37:23 crc kubenswrapper[5070]: I1213 04:37:23.886278 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceph-client-edpm-deployment-openstack-edpm-ipam-jvrl7_dcb9f2b5-3392-400a-9071-39b873d26bca/ceph-client-edpm-deployment-openstack-edpm-ipam/0.log" Dec 13 04:37:23 crc kubenswrapper[5070]: I1213 04:37:23.994472 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-vc52p_3dc4fcae-76ad-430d-968e-9bb5e53d589e/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam/0.log" Dec 13 04:37:24 crc kubenswrapper[5070]: I1213 04:37:24.537586 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-backup-0_21e7cf18-aaa7-458d-b7fa-e1084b6d7e4b/probe/0.log" Dec 13 04:37:24 crc kubenswrapper[5070]: I1213 04:37:24.758688 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_6d14911c-95e8-4604-af3a-efcea2eb4b73/cinder-api/0.log" Dec 13 04:37:24 crc kubenswrapper[5070]: I1213 04:37:24.979887 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_6d14911c-95e8-4604-af3a-efcea2eb4b73/cinder-api-log/0.log" Dec 13 04:37:24 crc kubenswrapper[5070]: I1213 04:37:24.986384 5070 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_cinder-backup-0_21e7cf18-aaa7-458d-b7fa-e1084b6d7e4b/cinder-backup/0.log" Dec 13 04:37:25 crc kubenswrapper[5070]: I1213 04:37:25.009098 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_e4b6c4bc-0c14-4506-944f-fea34a040871/cinder-scheduler/0.log" Dec 13 04:37:25 crc kubenswrapper[5070]: I1213 04:37:25.058799 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_e4b6c4bc-0c14-4506-944f-fea34a040871/probe/0.log" Dec 13 04:37:25 crc kubenswrapper[5070]: I1213 04:37:25.263205 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-volume-volume1-0_8fd14d72-1dc4-4ab9-8b92-27e740e7eada/probe/0.log" Dec 13 04:37:25 crc kubenswrapper[5070]: I1213 04:37:25.392547 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-edpm-deployment-openstack-edpm-ipam-j7nn8_8c5ddd00-19e7-48fa-8573-a1f65b853c3e/configure-network-edpm-deployment-openstack-edpm-ipam/0.log" Dec 13 04:37:25 crc kubenswrapper[5070]: I1213 04:37:25.577544 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-s9v7b_3c7f94e1-5650-48ce-bccd-827f0cb55d76/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 13 04:37:25 crc kubenswrapper[5070]: I1213 04:37:25.837054 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-69655fd4bf-4hws8_b619a970-f642-404b-8617-fc0137ac6d56/init/0.log" Dec 13 04:37:26 crc kubenswrapper[5070]: I1213 04:37:26.054577 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-69655fd4bf-4hws8_b619a970-f642-404b-8617-fc0137ac6d56/init/0.log" Dec 13 04:37:26 crc kubenswrapper[5070]: I1213 04:37:26.188183 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-69655fd4bf-4hws8_b619a970-f642-404b-8617-fc0137ac6d56/dnsmasq-dns/0.log" Dec 13 04:37:26 crc kubenswrapper[5070]: I1213 04:37:26.198468 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_48089201-da81-4de1-be8f-3a832ff67774/glance-httpd/0.log" Dec 13 04:37:26 crc kubenswrapper[5070]: I1213 04:37:26.277622 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_48089201-da81-4de1-be8f-3a832ff67774/glance-log/0.log" Dec 13 04:37:26 crc kubenswrapper[5070]: I1213 04:37:26.403217 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_69a11b54-bec4-4a0e-b498-94747f0c3e37/glance-log/0.log" Dec 13 04:37:26 crc kubenswrapper[5070]: I1213 04:37:26.432375 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_69a11b54-bec4-4a0e-b498-94747f0c3e37/glance-httpd/0.log" Dec 13 04:37:26 crc kubenswrapper[5070]: I1213 04:37:26.756568 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-8655d596d8-lz82d_c1f8d0b8-5e4f-4237-a0f3-fa38d0c2bfde/horizon/0.log" Dec 13 04:37:26 crc kubenswrapper[5070]: I1213 04:37:26.812972 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-edpm-deployment-openstack-edpm-ipam-vkbfm_55e3e391-74a5-4af9-85de-df93ac3155e0/install-certs-edpm-deployment-openstack-edpm-ipam/0.log" Dec 13 04:37:26 crc kubenswrapper[5070]: I1213 04:37:26.985143 5070 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_install-os-edpm-deployment-openstack-edpm-ipam-qhqcm_d9ad7e67-58c9-4cee-8154-dc5119e96687/install-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 13 04:37:27 crc kubenswrapper[5070]: I1213 04:37:27.024916 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-8655d596d8-lz82d_c1f8d0b8-5e4f-4237-a0f3-fa38d0c2bfde/horizon-log/0.log" Dec 13 04:37:27 crc kubenswrapper[5070]: I1213 04:37:27.243234 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29426641-b6wg6_3b5bc78b-9491-4608-ba14-e198834b091a/keystone-cron/0.log" Dec 13 04:37:27 crc kubenswrapper[5070]: I1213 04:37:27.402183 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_708b0d41-c778-4856-b02c-895e2c15e3e3/kube-state-metrics/0.log" Dec 13 04:37:27 crc kubenswrapper[5070]: I1213 04:37:27.673108 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_libvirt-edpm-deployment-openstack-edpm-ipam-ls75m_244cbfa6-dea4-4fae-b3f5-582f53c21551/libvirt-edpm-deployment-openstack-edpm-ipam/0.log" Dec 13 04:37:27 crc kubenswrapper[5070]: I1213 04:37:27.926510 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-58cd5c7d4f-c98c8_5e66755e-b36f-4931-bc87-3fcecfc5c1b4/keystone-api/0.log" Dec 13 04:37:27 crc kubenswrapper[5070]: I1213 04:37:27.927111 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-api-0_1e8258a6-caa4-4149-b318-2f985cb3ccdc/manila-api/0.log" Dec 13 04:37:28 crc kubenswrapper[5070]: I1213 04:37:28.180783 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-api-0_1e8258a6-caa4-4149-b318-2f985cb3ccdc/manila-api-log/0.log" Dec 13 04:37:28 crc kubenswrapper[5070]: I1213 04:37:28.229607 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-scheduler-0_7227b03a-84e0-4c9b-bb1a-baaaeb6828a9/probe/0.log" Dec 13 04:37:28 crc kubenswrapper[5070]: I1213 04:37:28.242434 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-scheduler-0_7227b03a-84e0-4c9b-bb1a-baaaeb6828a9/manila-scheduler/0.log" Dec 13 04:37:28 crc kubenswrapper[5070]: I1213 04:37:28.427147 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-share-share1-0_f73fc610-e05f-414a-a55a-17cdfdf6c3d6/manila-share/0.log" Dec 13 04:37:28 crc kubenswrapper[5070]: I1213 04:37:28.465146 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-share-share1-0_f73fc610-e05f-414a-a55a-17cdfdf6c3d6/probe/0.log" Dec 13 04:37:28 crc kubenswrapper[5070]: I1213 04:37:28.937735 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-54586d498f-pgsrg_93c84134-01b6-453a-95d9-7f2a3e9a4f3d/neutron-httpd/0.log" Dec 13 04:37:29 crc kubenswrapper[5070]: I1213 04:37:29.042730 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-54586d498f-pgsrg_93c84134-01b6-453a-95d9-7f2a3e9a4f3d/neutron-api/0.log" Dec 13 04:37:29 crc kubenswrapper[5070]: I1213 04:37:29.055968 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-edpm-deployment-openstack-edpm-ipam-v4vt4_c87c7a56-123d-47b8-8e94-245995b89e61/neutron-metadata-edpm-deployment-openstack-edpm-ipam/0.log" Dec 13 04:37:29 crc kubenswrapper[5070]: E1213 04:37:29.564991 5070 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: 
[\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod058a2d61_481d_4071_ae35_dcb193c2126d.slice\": RecentStats: unable to find data in memory cache]" Dec 13 04:37:29 crc kubenswrapper[5070]: I1213 04:37:29.934405 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_01ff34fb-3f08-4462-9ec2-7ed2b05b114b/nova-api-log/0.log" Dec 13 04:37:29 crc kubenswrapper[5070]: I1213 04:37:29.998494 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_ee77d0d3-3a57-4105-8561-f7c73ddb8117/nova-cell0-conductor-conductor/0.log" Dec 13 04:37:30 crc kubenswrapper[5070]: I1213 04:37:30.353284 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_e71ec192-d2c9-4dab-9063-5a6639ecb927/nova-cell1-conductor-conductor/0.log" Dec 13 04:37:30 crc kubenswrapper[5070]: I1213 04:37:30.449815 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_01ff34fb-3f08-4462-9ec2-7ed2b05b114b/nova-api-api/0.log" Dec 13 04:37:30 crc kubenswrapper[5070]: I1213 04:37:30.680293 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_cfd0162b-8004-4817-8f85-efd5c493e3c0/nova-cell1-novncproxy-novncproxy/0.log" Dec 13 04:37:30 crc kubenswrapper[5070]: I1213 04:37:30.737355 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-tn2zd_62c6b21d-3500-4f16-b958-7a59dd7a7fda/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam/0.log" Dec 13 04:37:31 crc kubenswrapper[5070]: I1213 04:37:31.025244 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_a37bc3a1-ce6c-4c58-b7d6-cb9d64227a01/nova-metadata-log/0.log" Dec 13 04:37:31 crc kubenswrapper[5070]: I1213 04:37:31.417943 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_edbeeef8-80ad-4b02-bc7b-988cdec64a99/nova-scheduler-scheduler/0.log" Dec 13 04:37:31 crc kubenswrapper[5070]: I1213 04:37:31.475480 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_9dd33a5a-3305-45ff-a544-0bae02032d8f/mysql-bootstrap/0.log" Dec 13 04:37:31 crc kubenswrapper[5070]: I1213 04:37:31.653176 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_9dd33a5a-3305-45ff-a544-0bae02032d8f/mysql-bootstrap/0.log" Dec 13 04:37:31 crc kubenswrapper[5070]: I1213 04:37:31.696215 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_9dd33a5a-3305-45ff-a544-0bae02032d8f/galera/0.log" Dec 13 04:37:31 crc kubenswrapper[5070]: I1213 04:37:31.881994 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_df672553-0b7d-4554-9ff6-c5d9a674dffd/mysql-bootstrap/0.log" Dec 13 04:37:32 crc kubenswrapper[5070]: I1213 04:37:32.134490 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_df672553-0b7d-4554-9ff6-c5d9a674dffd/mysql-bootstrap/0.log" Dec 13 04:37:32 crc kubenswrapper[5070]: I1213 04:37:32.164308 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_df672553-0b7d-4554-9ff6-c5d9a674dffd/galera/0.log" Dec 13 04:37:32 crc kubenswrapper[5070]: I1213 04:37:32.369825 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_0d0b3d1f-385d-47e5-af36-d016c5b9cd1b/openstackclient/0.log" Dec 13 04:37:32 crc 
kubenswrapper[5070]: I1213 04:37:32.591628 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-dvs7b_30ad4728-1762-464a-a3ab-a24923973e0e/openstack-network-exporter/0.log" Dec 13 04:37:32 crc kubenswrapper[5070]: I1213 04:37:32.759618 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-zmbl5_5f8ec385-4f69-4645-95d2-2d854b3fac57/ovsdb-server-init/0.log" Dec 13 04:37:32 crc kubenswrapper[5070]: I1213 04:37:32.995074 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-zmbl5_5f8ec385-4f69-4645-95d2-2d854b3fac57/ovsdb-server-init/0.log" Dec 13 04:37:33 crc kubenswrapper[5070]: I1213 04:37:33.003789 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-zmbl5_5f8ec385-4f69-4645-95d2-2d854b3fac57/ovs-vswitchd/0.log" Dec 13 04:37:33 crc kubenswrapper[5070]: I1213 04:37:33.184553 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-zmbl5_5f8ec385-4f69-4645-95d2-2d854b3fac57/ovsdb-server/0.log" Dec 13 04:37:33 crc kubenswrapper[5070]: I1213 04:37:33.365239 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_a37bc3a1-ce6c-4c58-b7d6-cb9d64227a01/nova-metadata-metadata/0.log" Dec 13 04:37:33 crc kubenswrapper[5070]: I1213 04:37:33.375656 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-x4xfl_fbcbdef3-6b6e-442e-9a5a-3bc14faf3be4/ovn-controller/0.log" Dec 13 04:37:33 crc kubenswrapper[5070]: I1213 04:37:33.433299 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-volume-volume1-0_8fd14d72-1dc4-4ab9-8b92-27e740e7eada/cinder-volume/0.log" Dec 13 04:37:33 crc kubenswrapper[5070]: I1213 04:37:33.837790 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_35f8712b-8f90-45cb-a2c6-dbbcc7357542/openstack-network-exporter/0.log" Dec 13 04:37:33 crc kubenswrapper[5070]: I1213 04:37:33.839487 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-59xp7_ae4ad2bd-aeff-4812-b171-7630319ad71e/ovn-edpm-deployment-openstack-edpm-ipam/0.log" Dec 13 04:37:33 crc kubenswrapper[5070]: I1213 04:37:33.864259 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_35f8712b-8f90-45cb-a2c6-dbbcc7357542/ovn-northd/0.log" Dec 13 04:37:34 crc kubenswrapper[5070]: I1213 04:37:34.092761 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_037626c8-04bb-4af4-a5f3-309c3c174f98/ovsdbserver-nb/0.log" Dec 13 04:37:34 crc kubenswrapper[5070]: I1213 04:37:34.121024 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_037626c8-04bb-4af4-a5f3-309c3c174f98/openstack-network-exporter/0.log" Dec 13 04:37:34 crc kubenswrapper[5070]: I1213 04:37:34.269249 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_077843d3-f8ee-476e-b18c-da48a3a2f200/openstack-network-exporter/0.log" Dec 13 04:37:34 crc kubenswrapper[5070]: I1213 04:37:34.360481 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_077843d3-f8ee-476e-b18c-da48a3a2f200/ovsdbserver-sb/0.log" Dec 13 04:37:34 crc kubenswrapper[5070]: I1213 04:37:34.499984 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-6ff9d66794-qds4v_9b374c77-6272-4397-b17e-c0bc4b8e3803/placement-api/0.log" Dec 
13 04:37:34 crc kubenswrapper[5070]: I1213 04:37:34.680870 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-6ff9d66794-qds4v_9b374c77-6272-4397-b17e-c0bc4b8e3803/placement-log/0.log" Dec 13 04:37:34 crc kubenswrapper[5070]: I1213 04:37:34.690965 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_46b708f1-eabc-44f5-8388-8e6b42d66fd0/setup-container/0.log" Dec 13 04:37:34 crc kubenswrapper[5070]: I1213 04:37:34.847862 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_46b708f1-eabc-44f5-8388-8e6b42d66fd0/setup-container/0.log" Dec 13 04:37:34 crc kubenswrapper[5070]: I1213 04:37:34.895543 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_965e1f2f-3f50-4411-8006-4db60cb5a504/setup-container/0.log" Dec 13 04:37:34 crc kubenswrapper[5070]: I1213 04:37:34.937981 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_46b708f1-eabc-44f5-8388-8e6b42d66fd0/rabbitmq/0.log" Dec 13 04:37:35 crc kubenswrapper[5070]: I1213 04:37:35.157468 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_965e1f2f-3f50-4411-8006-4db60cb5a504/rabbitmq/0.log" Dec 13 04:37:35 crc kubenswrapper[5070]: I1213 04:37:35.185560 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_965e1f2f-3f50-4411-8006-4db60cb5a504/setup-container/0.log" Dec 13 04:37:35 crc kubenswrapper[5070]: I1213 04:37:35.225145 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-edpm-deployment-openstack-edpm-ipam-psxtf_82cfb89d-1206-4fa9-881d-c8ff899d9ee8/reboot-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 13 04:37:35 crc kubenswrapper[5070]: I1213 04:37:35.436282 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-wd2rj_cd87f8d8-10c1-4341-b414-c234b474b4e2/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log" Dec 13 04:37:35 crc kubenswrapper[5070]: I1213 04:37:35.526103 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-edpm-deployment-openstack-edpm-ipam-vdj8w_a3440a4a-ac95-4afa-b10a-f95c600e4dcc/run-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 13 04:37:35 crc kubenswrapper[5070]: I1213 04:37:35.690461 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-edpm-deployment-c9pf8_bfbab1e3-812a-4942-bc27-14e6c8754264/ssh-known-hosts-edpm-deployment/0.log" Dec 13 04:37:35 crc kubenswrapper[5070]: I1213 04:37:35.792102 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tempest-tests-tempest_7414f018-4f89-4d33-a19a-af5e996ba16b/tempest-tests-tempest-tests-runner/0.log" Dec 13 04:37:35 crc kubenswrapper[5070]: I1213 04:37:35.858459 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_test-operator-logs-pod-tempest-tempest-tests-tempest_3e4d60ee-2d7b-4caf-9cfd-ace33894a93f/test-operator-logs-container/0.log" Dec 13 04:37:36 crc kubenswrapper[5070]: I1213 04:37:36.029616 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-edpm-deployment-openstack-edpm-ipam-gz4s8_a603c4f1-a640-47d1-819e-0518857464c4/validate-network-edpm-deployment-openstack-edpm-ipam/0.log" Dec 13 04:37:39 crc kubenswrapper[5070]: E1213 04:37:39.873618 5070 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial 
failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod058a2d61_481d_4071_ae35_dcb193c2126d.slice\": RecentStats: unable to find data in memory cache]" Dec 13 04:37:44 crc kubenswrapper[5070]: I1213 04:37:44.354633 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_c8cecacd-8f32-41ea-a0bb-1b1cab38ad2b/memcached/0.log" Dec 13 04:38:02 crc kubenswrapper[5070]: I1213 04:38:02.105645 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_1c2b05ec4440d1c803ed4d9fb5e82bd8db8287830a02ba7dc105a163cc24n5q_c03750de-0097-4810-a030-d5a3f98f68fe/util/0.log" Dec 13 04:38:02 crc kubenswrapper[5070]: I1213 04:38:02.270063 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_1c2b05ec4440d1c803ed4d9fb5e82bd8db8287830a02ba7dc105a163cc24n5q_c03750de-0097-4810-a030-d5a3f98f68fe/util/0.log" Dec 13 04:38:02 crc kubenswrapper[5070]: I1213 04:38:02.278945 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_1c2b05ec4440d1c803ed4d9fb5e82bd8db8287830a02ba7dc105a163cc24n5q_c03750de-0097-4810-a030-d5a3f98f68fe/pull/0.log" Dec 13 04:38:02 crc kubenswrapper[5070]: I1213 04:38:02.286578 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_1c2b05ec4440d1c803ed4d9fb5e82bd8db8287830a02ba7dc105a163cc24n5q_c03750de-0097-4810-a030-d5a3f98f68fe/pull/0.log" Dec 13 04:38:02 crc kubenswrapper[5070]: I1213 04:38:02.507496 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_1c2b05ec4440d1c803ed4d9fb5e82bd8db8287830a02ba7dc105a163cc24n5q_c03750de-0097-4810-a030-d5a3f98f68fe/pull/0.log" Dec 13 04:38:02 crc kubenswrapper[5070]: I1213 04:38:02.548022 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_1c2b05ec4440d1c803ed4d9fb5e82bd8db8287830a02ba7dc105a163cc24n5q_c03750de-0097-4810-a030-d5a3f98f68fe/util/0.log" Dec 13 04:38:02 crc kubenswrapper[5070]: I1213 04:38:02.554239 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_1c2b05ec4440d1c803ed4d9fb5e82bd8db8287830a02ba7dc105a163cc24n5q_c03750de-0097-4810-a030-d5a3f98f68fe/extract/0.log" Dec 13 04:38:02 crc kubenswrapper[5070]: I1213 04:38:02.715928 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-5bfbbb859d-blc2l_460be9d3-0ac8-4080-ba44-48db8452a323/kube-rbac-proxy/0.log" Dec 13 04:38:02 crc kubenswrapper[5070]: I1213 04:38:02.757743 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-5bfbbb859d-blc2l_460be9d3-0ac8-4080-ba44-48db8452a323/manager/0.log" Dec 13 04:38:02 crc kubenswrapper[5070]: I1213 04:38:02.764369 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-75f975cf8c-wb8p4_df0aabbc-0421-41d7-ac6b-540a4bd7121e/kube-rbac-proxy/0.log" Dec 13 04:38:03 crc kubenswrapper[5070]: I1213 04:38:03.342880 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-75f975cf8c-wb8p4_df0aabbc-0421-41d7-ac6b-540a4bd7121e/manager/0.log" Dec 13 04:38:03 crc kubenswrapper[5070]: I1213 04:38:03.364870 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-6788cc6d75-8hczh_5848798f-7e27-4b39-b60d-84edb77c765d/kube-rbac-proxy/0.log" Dec 13 04:38:03 crc kubenswrapper[5070]: I1213 04:38:03.406004 5070 
log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-6788cc6d75-8hczh_5848798f-7e27-4b39-b60d-84edb77c765d/manager/0.log" Dec 13 04:38:03 crc kubenswrapper[5070]: I1213 04:38:03.543044 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-85fbd69fcd-98cmt_17401b15-4810-4d28-9244-f1ef166c3278/kube-rbac-proxy/0.log" Dec 13 04:38:03 crc kubenswrapper[5070]: I1213 04:38:03.621819 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-85fbd69fcd-98cmt_17401b15-4810-4d28-9244-f1ef166c3278/manager/0.log" Dec 13 04:38:03 crc kubenswrapper[5070]: I1213 04:38:03.704595 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-698d6fd7d6-nm89q_62ea5d75-2d09-414d-b682-6ed196245ea9/kube-rbac-proxy/0.log" Dec 13 04:38:03 crc kubenswrapper[5070]: I1213 04:38:03.762961 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-698d6fd7d6-nm89q_62ea5d75-2d09-414d-b682-6ed196245ea9/manager/0.log" Dec 13 04:38:03 crc kubenswrapper[5070]: I1213 04:38:03.839850 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-7d5d9fd47f-lt4rh_17d2a89b-feed-4eae-bbc3-5296e677ef48/kube-rbac-proxy/0.log" Dec 13 04:38:03 crc kubenswrapper[5070]: I1213 04:38:03.876143 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-7d5d9fd47f-lt4rh_17d2a89b-feed-4eae-bbc3-5296e677ef48/manager/0.log" Dec 13 04:38:04 crc kubenswrapper[5070]: I1213 04:38:04.009797 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-85d55b5858-xjz7n_4fadcc6f-e200-444e-b1d4-e195467c129d/kube-rbac-proxy/0.log" Dec 13 04:38:04 crc kubenswrapper[5070]: I1213 04:38:04.159689 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-54485f899-6ccdh_eac9d917-6a87-4f79-9758-15984dd71e23/kube-rbac-proxy/0.log" Dec 13 04:38:04 crc kubenswrapper[5070]: I1213 04:38:04.161368 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-85d55b5858-xjz7n_4fadcc6f-e200-444e-b1d4-e195467c129d/manager/0.log" Dec 13 04:38:04 crc kubenswrapper[5070]: I1213 04:38:04.230534 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-54485f899-6ccdh_eac9d917-6a87-4f79-9758-15984dd71e23/manager/0.log" Dec 13 04:38:04 crc kubenswrapper[5070]: I1213 04:38:04.370904 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-79cc9d59f5-tnnbf_a037ecf5-dd37-4305-93a6-e28771e8df87/kube-rbac-proxy/0.log" Dec 13 04:38:04 crc kubenswrapper[5070]: I1213 04:38:04.389504 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-79cc9d59f5-tnnbf_a037ecf5-dd37-4305-93a6-e28771e8df87/manager/0.log" Dec 13 04:38:04 crc kubenswrapper[5070]: I1213 04:38:04.506467 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-5cbc8c7f96-889ct_66c61c3a-a7b0-4a4a-b086-9c1531a9b165/kube-rbac-proxy/0.log" Dec 13 04:38:04 crc kubenswrapper[5070]: I1213 
04:38:04.590084 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-5cbc8c7f96-889ct_66c61c3a-a7b0-4a4a-b086-9c1531a9b165/manager/0.log" Dec 13 04:38:04 crc kubenswrapper[5070]: I1213 04:38:04.637047 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-64d7c556cd-9xzl9_26894e71-7711-47cc-afe2-44f1d0657000/kube-rbac-proxy/0.log" Dec 13 04:38:04 crc kubenswrapper[5070]: I1213 04:38:04.733278 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-64d7c556cd-9xzl9_26894e71-7711-47cc-afe2-44f1d0657000/manager/0.log" Dec 13 04:38:04 crc kubenswrapper[5070]: I1213 04:38:04.757832 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-58879495c-mfwwx_cfd2f621-2f11-4b93-8b02-1ed72c06bb11/kube-rbac-proxy/0.log" Dec 13 04:38:04 crc kubenswrapper[5070]: I1213 04:38:04.860232 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-58879495c-mfwwx_cfd2f621-2f11-4b93-8b02-1ed72c06bb11/manager/0.log" Dec 13 04:38:04 crc kubenswrapper[5070]: I1213 04:38:04.914778 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-79d658b66d-9kcnz_714804fd-d184-418f-a12f-efaec040cef1/kube-rbac-proxy/0.log" Dec 13 04:38:05 crc kubenswrapper[5070]: I1213 04:38:05.024070 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-79d658b66d-9kcnz_714804fd-d184-418f-a12f-efaec040cef1/manager/0.log" Dec 13 04:38:05 crc kubenswrapper[5070]: I1213 04:38:05.704978 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-d5fb87cb8-5mqm7_954cafe5-7d98-4ccd-ad79-2c928fa65dc1/kube-rbac-proxy/0.log" Dec 13 04:38:05 crc kubenswrapper[5070]: I1213 04:38:05.706720 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-d5fb87cb8-5mqm7_954cafe5-7d98-4ccd-ad79-2c928fa65dc1/manager/0.log" Dec 13 04:38:05 crc kubenswrapper[5070]: I1213 04:38:05.735495 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-77868f484-qs7zf_bf9b1f68-efa4-4a9e-bcd3-6bcbad0dbd9a/kube-rbac-proxy/0.log" Dec 13 04:38:05 crc kubenswrapper[5070]: I1213 04:38:05.916710 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-67ccbf7849-8gcsf_3728efb1-b3b6-4c34-b375-6f3feb0b26a7/kube-rbac-proxy/0.log" Dec 13 04:38:05 crc kubenswrapper[5070]: I1213 04:38:05.927585 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-77868f484-qs7zf_bf9b1f68-efa4-4a9e-bcd3-6bcbad0dbd9a/manager/0.log" Dec 13 04:38:06 crc kubenswrapper[5070]: I1213 04:38:06.179508 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-69d6bd7c7c-k2jww_6dcdfaac-dbf4-459e-9798-3a1de9b35794/kube-rbac-proxy/0.log" Dec 13 04:38:06 crc kubenswrapper[5070]: I1213 04:38:06.366821 5070 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-69d6bd7c7c-k2jww_6dcdfaac-dbf4-459e-9798-3a1de9b35794/operator/0.log" Dec 13 04:38:06 crc kubenswrapper[5070]: I1213 04:38:06.431509 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-cdbc4_d57a056b-59d7-4a1e-ba1e-1102683c6118/registry-server/0.log" Dec 13 04:38:06 crc kubenswrapper[5070]: I1213 04:38:06.641718 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-5b67cfc8fb-8fhvk_a4e6faf7-1a69-45ea-ab85-23acffcd5cf0/kube-rbac-proxy/0.log" Dec 13 04:38:06 crc kubenswrapper[5070]: I1213 04:38:06.689698 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-867d87977b-v59rf_10bd6456-bd70-4e1a-a6e0-5eff23c9fec6/kube-rbac-proxy/0.log" Dec 13 04:38:06 crc kubenswrapper[5070]: I1213 04:38:06.710323 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-5b67cfc8fb-8fhvk_a4e6faf7-1a69-45ea-ab85-23acffcd5cf0/manager/0.log" Dec 13 04:38:06 crc kubenswrapper[5070]: I1213 04:38:06.832351 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-867d87977b-v59rf_10bd6456-bd70-4e1a-a6e0-5eff23c9fec6/manager/0.log" Dec 13 04:38:07 crc kubenswrapper[5070]: I1213 04:38:07.040050 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-5f97d8c699-2c52x_890ff40a-bb3f-435f-b823-a4e93bf712c0/operator/0.log" Dec 13 04:38:07 crc kubenswrapper[5070]: I1213 04:38:07.098946 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-8f6687c44-lcwtx_515c6c6d-7b89-4ee4-a1eb-d6ed51834050/kube-rbac-proxy/0.log" Dec 13 04:38:07 crc kubenswrapper[5070]: I1213 04:38:07.183836 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-67ccbf7849-8gcsf_3728efb1-b3b6-4c34-b375-6f3feb0b26a7/manager/0.log" Dec 13 04:38:07 crc kubenswrapper[5070]: I1213 04:38:07.527378 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-8f6687c44-lcwtx_515c6c6d-7b89-4ee4-a1eb-d6ed51834050/manager/0.log" Dec 13 04:38:07 crc kubenswrapper[5070]: I1213 04:38:07.569678 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-695797c565-nh8lk_42bdbde2-ee6a-4260-8088-4298757880e1/kube-rbac-proxy/0.log" Dec 13 04:38:07 crc kubenswrapper[5070]: I1213 04:38:07.698330 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-695797c565-nh8lk_42bdbde2-ee6a-4260-8088-4298757880e1/manager/0.log" Dec 13 04:38:07 crc kubenswrapper[5070]: I1213 04:38:07.716672 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-bb86466d8-qhqhn_69dbad02-02d2-4a9c-befc-bf082990eca7/kube-rbac-proxy/0.log" Dec 13 04:38:07 crc kubenswrapper[5070]: I1213 04:38:07.796379 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-bb86466d8-qhqhn_69dbad02-02d2-4a9c-befc-bf082990eca7/manager/0.log" Dec 13 04:38:07 crc kubenswrapper[5070]: I1213 04:38:07.818276 5070 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-6b56b8849f-8svhb_875771e8-6d22-42d9-89bc-614fdd9e41fa/kube-rbac-proxy/0.log" Dec 13 04:38:07 crc kubenswrapper[5070]: I1213 04:38:07.862966 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-6b56b8849f-8svhb_875771e8-6d22-42d9-89bc-614fdd9e41fa/manager/0.log" Dec 13 04:38:24 crc kubenswrapper[5070]: I1213 04:38:24.699708 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-dlrd7_6234fc7c-52ec-4021-b04f-0264df7a6307/control-plane-machine-set-operator/0.log" Dec 13 04:38:24 crc kubenswrapper[5070]: I1213 04:38:24.706208 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-z445m_9829d6ca-fc22-4e71-a966-cff569f273fb/kube-rbac-proxy/0.log" Dec 13 04:38:24 crc kubenswrapper[5070]: I1213 04:38:24.826944 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-z445m_9829d6ca-fc22-4e71-a966-cff569f273fb/machine-api-operator/0.log" Dec 13 04:38:38 crc kubenswrapper[5070]: I1213 04:38:38.361875 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-5655c58dd6-wjw2w_8d55d441-4092-4ea3-b7a8-3663e819124a/cert-manager-webhook/0.log" Dec 13 04:38:38 crc kubenswrapper[5070]: I1213 04:38:38.373405 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7f985d654d-k6njx_79ea0824-5ff1-40ab-b605-4a299e403c62/cert-manager-cainjector/0.log" Dec 13 04:38:38 crc kubenswrapper[5070]: I1213 04:38:38.398130 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-5b446d88c5-sql5c_13ca830e-d979-4549-8043-df7846a52f28/cert-manager-controller/0.log" Dec 13 04:38:51 crc kubenswrapper[5070]: I1213 04:38:51.986072 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-6ff7998486-8xq5d_ff50d9d0-6e48-4570-89e4-f4a1eb0e8012/nmstate-console-plugin/0.log" Dec 13 04:38:52 crc kubenswrapper[5070]: I1213 04:38:52.106054 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-srhsc_9cdef900-29f1-4965-a889-4086d7422f44/nmstate-handler/0.log" Dec 13 04:38:52 crc kubenswrapper[5070]: I1213 04:38:52.174953 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f7f7578db-j499r_7579c85c-ab24-4642-9bde-28ab06c7db9c/kube-rbac-proxy/0.log" Dec 13 04:38:52 crc kubenswrapper[5070]: I1213 04:38:52.210736 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f7f7578db-j499r_7579c85c-ab24-4642-9bde-28ab06c7db9c/nmstate-metrics/0.log" Dec 13 04:38:52 crc kubenswrapper[5070]: I1213 04:38:52.317135 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-6769fb99d-r8zml_05a2a978-4fb0-43cb-8402-89e02e584eed/nmstate-operator/0.log" Dec 13 04:38:52 crc kubenswrapper[5070]: I1213 04:38:52.422071 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-f8fb84555-s4zck_e1e57c1c-b54c-461c-b834-b5c4c9fc3c94/nmstate-webhook/0.log" Dec 13 04:39:08 crc kubenswrapper[5070]: I1213 04:39:08.290836 5070 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_controller-5bddd4b946-wmrgw_6f946df5-dc5e-4b3a-afd7-51209e6dd09b/kube-rbac-proxy/0.log" Dec 13 04:39:08 crc kubenswrapper[5070]: I1213 04:39:08.423676 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-5bddd4b946-wmrgw_6f946df5-dc5e-4b3a-afd7-51209e6dd09b/controller/0.log" Dec 13 04:39:08 crc kubenswrapper[5070]: I1213 04:39:08.541407 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4s5j9_26e460ad-a509-4c45-a5a7-64cd87d2a5f0/cp-frr-files/0.log" Dec 13 04:39:08 crc kubenswrapper[5070]: I1213 04:39:08.653350 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4s5j9_26e460ad-a509-4c45-a5a7-64cd87d2a5f0/cp-reloader/0.log" Dec 13 04:39:08 crc kubenswrapper[5070]: I1213 04:39:08.688846 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4s5j9_26e460ad-a509-4c45-a5a7-64cd87d2a5f0/cp-frr-files/0.log" Dec 13 04:39:08 crc kubenswrapper[5070]: I1213 04:39:08.690001 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4s5j9_26e460ad-a509-4c45-a5a7-64cd87d2a5f0/cp-metrics/0.log" Dec 13 04:39:08 crc kubenswrapper[5070]: I1213 04:39:08.725541 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4s5j9_26e460ad-a509-4c45-a5a7-64cd87d2a5f0/cp-reloader/0.log" Dec 13 04:39:08 crc kubenswrapper[5070]: I1213 04:39:08.904718 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4s5j9_26e460ad-a509-4c45-a5a7-64cd87d2a5f0/cp-frr-files/0.log" Dec 13 04:39:08 crc kubenswrapper[5070]: I1213 04:39:08.913985 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4s5j9_26e460ad-a509-4c45-a5a7-64cd87d2a5f0/cp-metrics/0.log" Dec 13 04:39:08 crc kubenswrapper[5070]: I1213 04:39:08.947392 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4s5j9_26e460ad-a509-4c45-a5a7-64cd87d2a5f0/cp-reloader/0.log" Dec 13 04:39:09 crc kubenswrapper[5070]: I1213 04:39:09.021738 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4s5j9_26e460ad-a509-4c45-a5a7-64cd87d2a5f0/cp-metrics/0.log" Dec 13 04:39:09 crc kubenswrapper[5070]: I1213 04:39:09.137434 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4s5j9_26e460ad-a509-4c45-a5a7-64cd87d2a5f0/cp-frr-files/0.log" Dec 13 04:39:09 crc kubenswrapper[5070]: I1213 04:39:09.164172 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4s5j9_26e460ad-a509-4c45-a5a7-64cd87d2a5f0/cp-reloader/0.log" Dec 13 04:39:09 crc kubenswrapper[5070]: I1213 04:39:09.227944 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4s5j9_26e460ad-a509-4c45-a5a7-64cd87d2a5f0/controller/0.log" Dec 13 04:39:09 crc kubenswrapper[5070]: I1213 04:39:09.230338 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4s5j9_26e460ad-a509-4c45-a5a7-64cd87d2a5f0/cp-metrics/0.log" Dec 13 04:39:09 crc kubenswrapper[5070]: I1213 04:39:09.400321 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4s5j9_26e460ad-a509-4c45-a5a7-64cd87d2a5f0/frr-metrics/0.log" Dec 13 04:39:09 crc kubenswrapper[5070]: I1213 04:39:09.461097 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4s5j9_26e460ad-a509-4c45-a5a7-64cd87d2a5f0/kube-rbac-proxy-frr/0.log" Dec 13 04:39:09 crc 
kubenswrapper[5070]: I1213 04:39:09.482286 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4s5j9_26e460ad-a509-4c45-a5a7-64cd87d2a5f0/kube-rbac-proxy/0.log" Dec 13 04:39:09 crc kubenswrapper[5070]: I1213 04:39:09.653943 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4s5j9_26e460ad-a509-4c45-a5a7-64cd87d2a5f0/reloader/0.log" Dec 13 04:39:09 crc kubenswrapper[5070]: I1213 04:39:09.663508 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-7784b6fcf-jg4tq_64134541-9479-4e28-9408-092333fa9e08/frr-k8s-webhook-server/0.log" Dec 13 04:39:09 crc kubenswrapper[5070]: I1213 04:39:09.946783 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-58d87b8dcc-6wpw7_e944687d-2c42-446f-bc22-a7885909cead/manager/0.log" Dec 13 04:39:10 crc kubenswrapper[5070]: I1213 04:39:10.119913 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-6f764c67c4-2kqc9_68ca8795-fc30-4cbe-a185-80555ab011d1/webhook-server/0.log" Dec 13 04:39:10 crc kubenswrapper[5070]: I1213 04:39:10.188773 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-8kznn_8216ae30-2f9d-47c4-a748-3b0456ff9a27/kube-rbac-proxy/0.log" Dec 13 04:39:11 crc kubenswrapper[5070]: I1213 04:39:11.042341 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-8kznn_8216ae30-2f9d-47c4-a748-3b0456ff9a27/speaker/0.log" Dec 13 04:39:11 crc kubenswrapper[5070]: I1213 04:39:11.097041 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-4s5j9_26e460ad-a509-4c45-a5a7-64cd87d2a5f0/frr/0.log" Dec 13 04:39:21 crc kubenswrapper[5070]: I1213 04:39:21.943339 5070 patch_prober.go:28] interesting pod/machine-config-daemon-9l4rb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 13 04:39:21 crc kubenswrapper[5070]: I1213 04:39:21.944011 5070 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 13 04:39:25 crc kubenswrapper[5070]: I1213 04:39:25.159645 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4kcf9n_b7800c1a-3a8e-4647-91be-97515b5c094c/util/0.log" Dec 13 04:39:25 crc kubenswrapper[5070]: I1213 04:39:25.259395 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4kcf9n_b7800c1a-3a8e-4647-91be-97515b5c094c/util/0.log" Dec 13 04:39:25 crc kubenswrapper[5070]: I1213 04:39:25.271660 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4kcf9n_b7800c1a-3a8e-4647-91be-97515b5c094c/pull/0.log" Dec 13 04:39:25 crc kubenswrapper[5070]: I1213 04:39:25.311885 5070 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4kcf9n_b7800c1a-3a8e-4647-91be-97515b5c094c/pull/0.log" Dec 13 04:39:25 crc kubenswrapper[5070]: I1213 04:39:25.493472 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4kcf9n_b7800c1a-3a8e-4647-91be-97515b5c094c/util/0.log" Dec 13 04:39:25 crc kubenswrapper[5070]: I1213 04:39:25.494242 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4kcf9n_b7800c1a-3a8e-4647-91be-97515b5c094c/pull/0.log" Dec 13 04:39:25 crc kubenswrapper[5070]: I1213 04:39:25.558488 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4kcf9n_b7800c1a-3a8e-4647-91be-97515b5c094c/extract/0.log" Dec 13 04:39:25 crc kubenswrapper[5070]: I1213 04:39:25.702281 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8mvkgm_4461d5cb-115e-4a85-92e2-066539781b0c/util/0.log" Dec 13 04:39:25 crc kubenswrapper[5070]: I1213 04:39:25.827854 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8mvkgm_4461d5cb-115e-4a85-92e2-066539781b0c/util/0.log" Dec 13 04:39:25 crc kubenswrapper[5070]: I1213 04:39:25.850140 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8mvkgm_4461d5cb-115e-4a85-92e2-066539781b0c/pull/0.log" Dec 13 04:39:25 crc kubenswrapper[5070]: I1213 04:39:25.853931 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8mvkgm_4461d5cb-115e-4a85-92e2-066539781b0c/pull/0.log" Dec 13 04:39:25 crc kubenswrapper[5070]: I1213 04:39:25.993783 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8mvkgm_4461d5cb-115e-4a85-92e2-066539781b0c/util/0.log" Dec 13 04:39:26 crc kubenswrapper[5070]: I1213 04:39:26.017489 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8mvkgm_4461d5cb-115e-4a85-92e2-066539781b0c/extract/0.log" Dec 13 04:39:26 crc kubenswrapper[5070]: I1213 04:39:26.053026 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8mvkgm_4461d5cb-115e-4a85-92e2-066539781b0c/pull/0.log" Dec 13 04:39:26 crc kubenswrapper[5070]: I1213 04:39:26.172644 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-2dbtp_6cee5946-a6c3-4d8e-a6f3-7200c45c3a98/extract-utilities/0.log" Dec 13 04:39:26 crc kubenswrapper[5070]: I1213 04:39:26.350302 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-2dbtp_6cee5946-a6c3-4d8e-a6f3-7200c45c3a98/extract-utilities/0.log" Dec 13 04:39:26 crc kubenswrapper[5070]: I1213 04:39:26.374068 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-2dbtp_6cee5946-a6c3-4d8e-a6f3-7200c45c3a98/extract-content/0.log" Dec 13 04:39:26 crc kubenswrapper[5070]: I1213 04:39:26.376148 
5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-2dbtp_6cee5946-a6c3-4d8e-a6f3-7200c45c3a98/extract-content/0.log" Dec 13 04:39:27 crc kubenswrapper[5070]: I1213 04:39:27.156611 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-2dbtp_6cee5946-a6c3-4d8e-a6f3-7200c45c3a98/extract-content/0.log" Dec 13 04:39:27 crc kubenswrapper[5070]: I1213 04:39:27.223171 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-2dbtp_6cee5946-a6c3-4d8e-a6f3-7200c45c3a98/extract-utilities/0.log" Dec 13 04:39:27 crc kubenswrapper[5070]: I1213 04:39:27.395764 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-db5z5_99ef6e31-9820-4f3c-9d75-10e8286eb467/extract-utilities/0.log" Dec 13 04:39:27 crc kubenswrapper[5070]: I1213 04:39:27.664423 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-db5z5_99ef6e31-9820-4f3c-9d75-10e8286eb467/extract-utilities/0.log" Dec 13 04:39:27 crc kubenswrapper[5070]: I1213 04:39:27.713993 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-db5z5_99ef6e31-9820-4f3c-9d75-10e8286eb467/extract-content/0.log" Dec 13 04:39:27 crc kubenswrapper[5070]: I1213 04:39:27.729038 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-db5z5_99ef6e31-9820-4f3c-9d75-10e8286eb467/extract-content/0.log" Dec 13 04:39:27 crc kubenswrapper[5070]: I1213 04:39:27.951582 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-db5z5_99ef6e31-9820-4f3c-9d75-10e8286eb467/extract-utilities/0.log" Dec 13 04:39:28 crc kubenswrapper[5070]: I1213 04:39:28.006613 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-2dbtp_6cee5946-a6c3-4d8e-a6f3-7200c45c3a98/registry-server/0.log" Dec 13 04:39:28 crc kubenswrapper[5070]: I1213 04:39:28.009658 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-db5z5_99ef6e31-9820-4f3c-9d75-10e8286eb467/extract-content/0.log" Dec 13 04:39:28 crc kubenswrapper[5070]: I1213 04:39:28.208662 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-pszz7_e6b959a6-f715-46c4-9192-c9c372246129/marketplace-operator/0.log" Dec 13 04:39:28 crc kubenswrapper[5070]: I1213 04:39:28.405547 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-xmckj_e3984f87-e746-4d49-95d2-acca4c05400a/extract-utilities/0.log" Dec 13 04:39:28 crc kubenswrapper[5070]: I1213 04:39:28.834344 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-db5z5_99ef6e31-9820-4f3c-9d75-10e8286eb467/registry-server/0.log" Dec 13 04:39:28 crc kubenswrapper[5070]: I1213 04:39:28.962479 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-xmckj_e3984f87-e746-4d49-95d2-acca4c05400a/extract-content/0.log" Dec 13 04:39:28 crc kubenswrapper[5070]: I1213 04:39:28.987877 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-xmckj_e3984f87-e746-4d49-95d2-acca4c05400a/extract-content/0.log" Dec 13 04:39:28 crc kubenswrapper[5070]: I1213 04:39:28.995361 5070 
log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-xmckj_e3984f87-e746-4d49-95d2-acca4c05400a/extract-utilities/0.log" Dec 13 04:39:29 crc kubenswrapper[5070]: I1213 04:39:29.131095 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-xmckj_e3984f87-e746-4d49-95d2-acca4c05400a/extract-content/0.log" Dec 13 04:39:29 crc kubenswrapper[5070]: I1213 04:39:29.151031 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-xmckj_e3984f87-e746-4d49-95d2-acca4c05400a/extract-utilities/0.log" Dec 13 04:39:29 crc kubenswrapper[5070]: I1213 04:39:29.445592 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-6lzk5_91cbae40-2fa3-4c39-acd8-43cc2c23d902/extract-utilities/0.log" Dec 13 04:39:29 crc kubenswrapper[5070]: I1213 04:39:29.509137 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-6lzk5_91cbae40-2fa3-4c39-acd8-43cc2c23d902/extract-utilities/0.log" Dec 13 04:39:29 crc kubenswrapper[5070]: I1213 04:39:29.588246 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-6lzk5_91cbae40-2fa3-4c39-acd8-43cc2c23d902/extract-content/0.log" Dec 13 04:39:29 crc kubenswrapper[5070]: I1213 04:39:29.627726 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-xmckj_e3984f87-e746-4d49-95d2-acca4c05400a/registry-server/0.log" Dec 13 04:39:29 crc kubenswrapper[5070]: I1213 04:39:29.636891 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-6lzk5_91cbae40-2fa3-4c39-acd8-43cc2c23d902/extract-content/0.log" Dec 13 04:39:29 crc kubenswrapper[5070]: I1213 04:39:29.803929 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-6lzk5_91cbae40-2fa3-4c39-acd8-43cc2c23d902/extract-utilities/0.log" Dec 13 04:39:29 crc kubenswrapper[5070]: I1213 04:39:29.807885 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-6lzk5_91cbae40-2fa3-4c39-acd8-43cc2c23d902/extract-content/0.log" Dec 13 04:39:30 crc kubenswrapper[5070]: I1213 04:39:30.471242 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-6lzk5_91cbae40-2fa3-4c39-acd8-43cc2c23d902/registry-server/0.log" Dec 13 04:39:51 crc kubenswrapper[5070]: I1213 04:39:51.942971 5070 patch_prober.go:28] interesting pod/machine-config-daemon-9l4rb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 13 04:39:51 crc kubenswrapper[5070]: I1213 04:39:51.943591 5070 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 13 04:40:21 crc kubenswrapper[5070]: I1213 04:40:21.942820 5070 patch_prober.go:28] interesting pod/machine-config-daemon-9l4rb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 
127.0.0.1:8798: connect: connection refused" start-of-body= Dec 13 04:40:21 crc kubenswrapper[5070]: I1213 04:40:21.943423 5070 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 13 04:40:21 crc kubenswrapper[5070]: I1213 04:40:21.943515 5070 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" Dec 13 04:40:21 crc kubenswrapper[5070]: I1213 04:40:21.944552 5070 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"4dd2d3fc2cd1ced75e69bd95c0ae138ae20b733a2f44ca40234bada3b2e3e2d0"} pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 13 04:40:21 crc kubenswrapper[5070]: I1213 04:40:21.944635 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" containerName="machine-config-daemon" containerID="cri-o://4dd2d3fc2cd1ced75e69bd95c0ae138ae20b733a2f44ca40234bada3b2e3e2d0" gracePeriod=600 Dec 13 04:40:22 crc kubenswrapper[5070]: E1213 04:40:22.081245 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 04:40:22 crc kubenswrapper[5070]: I1213 04:40:22.596622 5070 generic.go:334] "Generic (PLEG): container finished" podID="a2e447c7-5901-414f-af96-69441d4750db" containerID="4dd2d3fc2cd1ced75e69bd95c0ae138ae20b733a2f44ca40234bada3b2e3e2d0" exitCode=0 Dec 13 04:40:22 crc kubenswrapper[5070]: I1213 04:40:22.596678 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" event={"ID":"a2e447c7-5901-414f-af96-69441d4750db","Type":"ContainerDied","Data":"4dd2d3fc2cd1ced75e69bd95c0ae138ae20b733a2f44ca40234bada3b2e3e2d0"} Dec 13 04:40:22 crc kubenswrapper[5070]: I1213 04:40:22.596714 5070 scope.go:117] "RemoveContainer" containerID="c34f4a52bb9647b9f7bcdab3043ab312c6ab456ed8b7b789d6017a360b184ec2" Dec 13 04:40:22 crc kubenswrapper[5070]: I1213 04:40:22.600708 5070 scope.go:117] "RemoveContainer" containerID="4dd2d3fc2cd1ced75e69bd95c0ae138ae20b733a2f44ca40234bada3b2e3e2d0" Dec 13 04:40:22 crc kubenswrapper[5070]: E1213 04:40:22.601305 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 04:40:24 crc kubenswrapper[5070]: I1213 04:40:24.699308 5070 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-marketplace/redhat-operators-n449v"] Dec 13 04:40:24 crc kubenswrapper[5070]: E1213 04:40:24.700869 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5bff6e2c-8357-4b44-854d-8ff82897ee24" containerName="registry-server" Dec 13 04:40:24 crc kubenswrapper[5070]: I1213 04:40:24.700903 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="5bff6e2c-8357-4b44-854d-8ff82897ee24" containerName="registry-server" Dec 13 04:40:24 crc kubenswrapper[5070]: E1213 04:40:24.700929 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5bff6e2c-8357-4b44-854d-8ff82897ee24" containerName="extract-content" Dec 13 04:40:24 crc kubenswrapper[5070]: I1213 04:40:24.700948 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="5bff6e2c-8357-4b44-854d-8ff82897ee24" containerName="extract-content" Dec 13 04:40:24 crc kubenswrapper[5070]: E1213 04:40:24.700989 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5bff6e2c-8357-4b44-854d-8ff82897ee24" containerName="extract-utilities" Dec 13 04:40:24 crc kubenswrapper[5070]: I1213 04:40:24.701008 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="5bff6e2c-8357-4b44-854d-8ff82897ee24" containerName="extract-utilities" Dec 13 04:40:24 crc kubenswrapper[5070]: E1213 04:40:24.701069 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fca679a6-3edb-4cfa-a9bb-de299bc26562" containerName="extract-content" Dec 13 04:40:24 crc kubenswrapper[5070]: I1213 04:40:24.701089 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="fca679a6-3edb-4cfa-a9bb-de299bc26562" containerName="extract-content" Dec 13 04:40:24 crc kubenswrapper[5070]: E1213 04:40:24.701137 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fca679a6-3edb-4cfa-a9bb-de299bc26562" containerName="extract-utilities" Dec 13 04:40:24 crc kubenswrapper[5070]: I1213 04:40:24.701154 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="fca679a6-3edb-4cfa-a9bb-de299bc26562" containerName="extract-utilities" Dec 13 04:40:24 crc kubenswrapper[5070]: E1213 04:40:24.701187 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fca679a6-3edb-4cfa-a9bb-de299bc26562" containerName="registry-server" Dec 13 04:40:24 crc kubenswrapper[5070]: I1213 04:40:24.701203 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="fca679a6-3edb-4cfa-a9bb-de299bc26562" containerName="registry-server" Dec 13 04:40:24 crc kubenswrapper[5070]: I1213 04:40:24.701751 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="5bff6e2c-8357-4b44-854d-8ff82897ee24" containerName="registry-server" Dec 13 04:40:24 crc kubenswrapper[5070]: I1213 04:40:24.701809 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="fca679a6-3edb-4cfa-a9bb-de299bc26562" containerName="registry-server" Dec 13 04:40:24 crc kubenswrapper[5070]: I1213 04:40:24.704790 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-n449v" Dec 13 04:40:24 crc kubenswrapper[5070]: I1213 04:40:24.713248 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-n449v"] Dec 13 04:40:24 crc kubenswrapper[5070]: I1213 04:40:24.802655 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gbrrw\" (UniqueName: \"kubernetes.io/projected/b769b1e7-7490-4be2-984b-958a593082b2-kube-api-access-gbrrw\") pod \"redhat-operators-n449v\" (UID: \"b769b1e7-7490-4be2-984b-958a593082b2\") " pod="openshift-marketplace/redhat-operators-n449v" Dec 13 04:40:24 crc kubenswrapper[5070]: I1213 04:40:24.802751 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b769b1e7-7490-4be2-984b-958a593082b2-utilities\") pod \"redhat-operators-n449v\" (UID: \"b769b1e7-7490-4be2-984b-958a593082b2\") " pod="openshift-marketplace/redhat-operators-n449v" Dec 13 04:40:24 crc kubenswrapper[5070]: I1213 04:40:24.802806 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b769b1e7-7490-4be2-984b-958a593082b2-catalog-content\") pod \"redhat-operators-n449v\" (UID: \"b769b1e7-7490-4be2-984b-958a593082b2\") " pod="openshift-marketplace/redhat-operators-n449v" Dec 13 04:40:24 crc kubenswrapper[5070]: I1213 04:40:24.903991 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b769b1e7-7490-4be2-984b-958a593082b2-catalog-content\") pod \"redhat-operators-n449v\" (UID: \"b769b1e7-7490-4be2-984b-958a593082b2\") " pod="openshift-marketplace/redhat-operators-n449v" Dec 13 04:40:24 crc kubenswrapper[5070]: I1213 04:40:24.904135 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gbrrw\" (UniqueName: \"kubernetes.io/projected/b769b1e7-7490-4be2-984b-958a593082b2-kube-api-access-gbrrw\") pod \"redhat-operators-n449v\" (UID: \"b769b1e7-7490-4be2-984b-958a593082b2\") " pod="openshift-marketplace/redhat-operators-n449v" Dec 13 04:40:24 crc kubenswrapper[5070]: I1213 04:40:24.904229 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b769b1e7-7490-4be2-984b-958a593082b2-utilities\") pod \"redhat-operators-n449v\" (UID: \"b769b1e7-7490-4be2-984b-958a593082b2\") " pod="openshift-marketplace/redhat-operators-n449v" Dec 13 04:40:24 crc kubenswrapper[5070]: I1213 04:40:24.904974 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b769b1e7-7490-4be2-984b-958a593082b2-catalog-content\") pod \"redhat-operators-n449v\" (UID: \"b769b1e7-7490-4be2-984b-958a593082b2\") " pod="openshift-marketplace/redhat-operators-n449v" Dec 13 04:40:24 crc kubenswrapper[5070]: I1213 04:40:24.905030 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b769b1e7-7490-4be2-984b-958a593082b2-utilities\") pod \"redhat-operators-n449v\" (UID: \"b769b1e7-7490-4be2-984b-958a593082b2\") " pod="openshift-marketplace/redhat-operators-n449v" Dec 13 04:40:24 crc kubenswrapper[5070]: I1213 04:40:24.940345 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-gbrrw\" (UniqueName: \"kubernetes.io/projected/b769b1e7-7490-4be2-984b-958a593082b2-kube-api-access-gbrrw\") pod \"redhat-operators-n449v\" (UID: \"b769b1e7-7490-4be2-984b-958a593082b2\") " pod="openshift-marketplace/redhat-operators-n449v" Dec 13 04:40:25 crc kubenswrapper[5070]: I1213 04:40:25.039059 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-n449v" Dec 13 04:40:25 crc kubenswrapper[5070]: I1213 04:40:25.530116 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-n449v"] Dec 13 04:40:25 crc kubenswrapper[5070]: I1213 04:40:25.640651 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-n449v" event={"ID":"b769b1e7-7490-4be2-984b-958a593082b2","Type":"ContainerStarted","Data":"060b9e9ce30c387e35e1443b0b842fb88c0d570a97492d051cd51989c20be8dc"} Dec 13 04:40:26 crc kubenswrapper[5070]: I1213 04:40:26.650974 5070 generic.go:334] "Generic (PLEG): container finished" podID="b769b1e7-7490-4be2-984b-958a593082b2" containerID="2524b49c159aca3f5a34d5e1c816195090f29f64863941ca09993f35f238038e" exitCode=0 Dec 13 04:40:26 crc kubenswrapper[5070]: I1213 04:40:26.651081 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-n449v" event={"ID":"b769b1e7-7490-4be2-984b-958a593082b2","Type":"ContainerDied","Data":"2524b49c159aca3f5a34d5e1c816195090f29f64863941ca09993f35f238038e"} Dec 13 04:40:26 crc kubenswrapper[5070]: I1213 04:40:26.655088 5070 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 13 04:40:27 crc kubenswrapper[5070]: I1213 04:40:27.676248 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-n449v" event={"ID":"b769b1e7-7490-4be2-984b-958a593082b2","Type":"ContainerStarted","Data":"2ebc24bce434d92caf3b6dddff5372892b467aaf5d08b439432b75f8634881e6"} Dec 13 04:40:29 crc kubenswrapper[5070]: I1213 04:40:29.696674 5070 generic.go:334] "Generic (PLEG): container finished" podID="b769b1e7-7490-4be2-984b-958a593082b2" containerID="2ebc24bce434d92caf3b6dddff5372892b467aaf5d08b439432b75f8634881e6" exitCode=0 Dec 13 04:40:29 crc kubenswrapper[5070]: I1213 04:40:29.696871 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-n449v" event={"ID":"b769b1e7-7490-4be2-984b-958a593082b2","Type":"ContainerDied","Data":"2ebc24bce434d92caf3b6dddff5372892b467aaf5d08b439432b75f8634881e6"} Dec 13 04:40:30 crc kubenswrapper[5070]: I1213 04:40:30.710778 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-n449v" event={"ID":"b769b1e7-7490-4be2-984b-958a593082b2","Type":"ContainerStarted","Data":"72f6b4671f7f581ba4be19974f388d2e6a1194fcba82e5559aaae19d2cc926b1"} Dec 13 04:40:35 crc kubenswrapper[5070]: I1213 04:40:35.039932 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-n449v" Dec 13 04:40:35 crc kubenswrapper[5070]: I1213 04:40:35.040586 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-n449v" Dec 13 04:40:36 crc kubenswrapper[5070]: I1213 04:40:36.089426 5070 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-n449v" podUID="b769b1e7-7490-4be2-984b-958a593082b2" containerName="registry-server" probeResult="failure" output=< Dec 13 
04:40:36 crc kubenswrapper[5070]: timeout: failed to connect service ":50051" within 1s Dec 13 04:40:36 crc kubenswrapper[5070]: > Dec 13 04:40:37 crc kubenswrapper[5070]: I1213 04:40:37.167287 5070 scope.go:117] "RemoveContainer" containerID="4dd2d3fc2cd1ced75e69bd95c0ae138ae20b733a2f44ca40234bada3b2e3e2d0" Dec 13 04:40:37 crc kubenswrapper[5070]: E1213 04:40:37.168117 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 04:40:45 crc kubenswrapper[5070]: I1213 04:40:45.109340 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-n449v" Dec 13 04:40:45 crc kubenswrapper[5070]: I1213 04:40:45.150707 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-n449v" podStartSLOduration=17.572382601 podStartE2EDuration="21.15068666s" podCreationTimestamp="2025-12-13 04:40:24 +0000 UTC" firstStartedPulling="2025-12-13 04:40:26.654773193 +0000 UTC m=+5318.890616739" lastFinishedPulling="2025-12-13 04:40:30.233077242 +0000 UTC m=+5322.468920798" observedRunningTime="2025-12-13 04:40:30.734764364 +0000 UTC m=+5322.970607900" watchObservedRunningTime="2025-12-13 04:40:45.15068666 +0000 UTC m=+5337.386530206" Dec 13 04:40:45 crc kubenswrapper[5070]: I1213 04:40:45.184989 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-n449v" Dec 13 04:40:45 crc kubenswrapper[5070]: I1213 04:40:45.364055 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-n449v"] Dec 13 04:40:46 crc kubenswrapper[5070]: I1213 04:40:46.890024 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-n449v" podUID="b769b1e7-7490-4be2-984b-958a593082b2" containerName="registry-server" containerID="cri-o://72f6b4671f7f581ba4be19974f388d2e6a1194fcba82e5559aaae19d2cc926b1" gracePeriod=2 Dec 13 04:40:47 crc kubenswrapper[5070]: I1213 04:40:47.388310 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-n449v" Dec 13 04:40:47 crc kubenswrapper[5070]: I1213 04:40:47.564831 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gbrrw\" (UniqueName: \"kubernetes.io/projected/b769b1e7-7490-4be2-984b-958a593082b2-kube-api-access-gbrrw\") pod \"b769b1e7-7490-4be2-984b-958a593082b2\" (UID: \"b769b1e7-7490-4be2-984b-958a593082b2\") " Dec 13 04:40:47 crc kubenswrapper[5070]: I1213 04:40:47.564950 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b769b1e7-7490-4be2-984b-958a593082b2-utilities\") pod \"b769b1e7-7490-4be2-984b-958a593082b2\" (UID: \"b769b1e7-7490-4be2-984b-958a593082b2\") " Dec 13 04:40:47 crc kubenswrapper[5070]: I1213 04:40:47.565006 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b769b1e7-7490-4be2-984b-958a593082b2-catalog-content\") pod \"b769b1e7-7490-4be2-984b-958a593082b2\" (UID: \"b769b1e7-7490-4be2-984b-958a593082b2\") " Dec 13 04:40:47 crc kubenswrapper[5070]: I1213 04:40:47.566230 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b769b1e7-7490-4be2-984b-958a593082b2-utilities" (OuterVolumeSpecName: "utilities") pod "b769b1e7-7490-4be2-984b-958a593082b2" (UID: "b769b1e7-7490-4be2-984b-958a593082b2"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 04:40:47 crc kubenswrapper[5070]: I1213 04:40:47.575147 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b769b1e7-7490-4be2-984b-958a593082b2-kube-api-access-gbrrw" (OuterVolumeSpecName: "kube-api-access-gbrrw") pod "b769b1e7-7490-4be2-984b-958a593082b2" (UID: "b769b1e7-7490-4be2-984b-958a593082b2"). InnerVolumeSpecName "kube-api-access-gbrrw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 04:40:47 crc kubenswrapper[5070]: I1213 04:40:47.667896 5070 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b769b1e7-7490-4be2-984b-958a593082b2-utilities\") on node \"crc\" DevicePath \"\"" Dec 13 04:40:47 crc kubenswrapper[5070]: I1213 04:40:47.667973 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gbrrw\" (UniqueName: \"kubernetes.io/projected/b769b1e7-7490-4be2-984b-958a593082b2-kube-api-access-gbrrw\") on node \"crc\" DevicePath \"\"" Dec 13 04:40:47 crc kubenswrapper[5070]: I1213 04:40:47.721093 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b769b1e7-7490-4be2-984b-958a593082b2-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b769b1e7-7490-4be2-984b-958a593082b2" (UID: "b769b1e7-7490-4be2-984b-958a593082b2"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 04:40:47 crc kubenswrapper[5070]: I1213 04:40:47.769794 5070 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b769b1e7-7490-4be2-984b-958a593082b2-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 13 04:40:47 crc kubenswrapper[5070]: I1213 04:40:47.905563 5070 generic.go:334] "Generic (PLEG): container finished" podID="b769b1e7-7490-4be2-984b-958a593082b2" containerID="72f6b4671f7f581ba4be19974f388d2e6a1194fcba82e5559aaae19d2cc926b1" exitCode=0 Dec 13 04:40:47 crc kubenswrapper[5070]: I1213 04:40:47.905656 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-n449v" Dec 13 04:40:47 crc kubenswrapper[5070]: I1213 04:40:47.905636 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-n449v" event={"ID":"b769b1e7-7490-4be2-984b-958a593082b2","Type":"ContainerDied","Data":"72f6b4671f7f581ba4be19974f388d2e6a1194fcba82e5559aaae19d2cc926b1"} Dec 13 04:40:47 crc kubenswrapper[5070]: I1213 04:40:47.905866 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-n449v" event={"ID":"b769b1e7-7490-4be2-984b-958a593082b2","Type":"ContainerDied","Data":"060b9e9ce30c387e35e1443b0b842fb88c0d570a97492d051cd51989c20be8dc"} Dec 13 04:40:47 crc kubenswrapper[5070]: I1213 04:40:47.905913 5070 scope.go:117] "RemoveContainer" containerID="72f6b4671f7f581ba4be19974f388d2e6a1194fcba82e5559aaae19d2cc926b1" Dec 13 04:40:47 crc kubenswrapper[5070]: I1213 04:40:47.934192 5070 scope.go:117] "RemoveContainer" containerID="2ebc24bce434d92caf3b6dddff5372892b467aaf5d08b439432b75f8634881e6" Dec 13 04:40:47 crc kubenswrapper[5070]: I1213 04:40:47.962836 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-n449v"] Dec 13 04:40:47 crc kubenswrapper[5070]: I1213 04:40:47.974876 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-n449v"] Dec 13 04:40:47 crc kubenswrapper[5070]: I1213 04:40:47.992114 5070 scope.go:117] "RemoveContainer" containerID="2524b49c159aca3f5a34d5e1c816195090f29f64863941ca09993f35f238038e" Dec 13 04:40:48 crc kubenswrapper[5070]: I1213 04:40:48.033120 5070 scope.go:117] "RemoveContainer" containerID="72f6b4671f7f581ba4be19974f388d2e6a1194fcba82e5559aaae19d2cc926b1" Dec 13 04:40:48 crc kubenswrapper[5070]: E1213 04:40:48.033692 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"72f6b4671f7f581ba4be19974f388d2e6a1194fcba82e5559aaae19d2cc926b1\": container with ID starting with 72f6b4671f7f581ba4be19974f388d2e6a1194fcba82e5559aaae19d2cc926b1 not found: ID does not exist" containerID="72f6b4671f7f581ba4be19974f388d2e6a1194fcba82e5559aaae19d2cc926b1" Dec 13 04:40:48 crc kubenswrapper[5070]: I1213 04:40:48.033745 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"72f6b4671f7f581ba4be19974f388d2e6a1194fcba82e5559aaae19d2cc926b1"} err="failed to get container status \"72f6b4671f7f581ba4be19974f388d2e6a1194fcba82e5559aaae19d2cc926b1\": rpc error: code = NotFound desc = could not find container \"72f6b4671f7f581ba4be19974f388d2e6a1194fcba82e5559aaae19d2cc926b1\": container with ID starting with 72f6b4671f7f581ba4be19974f388d2e6a1194fcba82e5559aaae19d2cc926b1 not found: ID does not exist" Dec 13 04:40:48 crc 
kubenswrapper[5070]: I1213 04:40:48.033776 5070 scope.go:117] "RemoveContainer" containerID="2ebc24bce434d92caf3b6dddff5372892b467aaf5d08b439432b75f8634881e6" Dec 13 04:40:48 crc kubenswrapper[5070]: E1213 04:40:48.034264 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2ebc24bce434d92caf3b6dddff5372892b467aaf5d08b439432b75f8634881e6\": container with ID starting with 2ebc24bce434d92caf3b6dddff5372892b467aaf5d08b439432b75f8634881e6 not found: ID does not exist" containerID="2ebc24bce434d92caf3b6dddff5372892b467aaf5d08b439432b75f8634881e6" Dec 13 04:40:48 crc kubenswrapper[5070]: I1213 04:40:48.034293 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2ebc24bce434d92caf3b6dddff5372892b467aaf5d08b439432b75f8634881e6"} err="failed to get container status \"2ebc24bce434d92caf3b6dddff5372892b467aaf5d08b439432b75f8634881e6\": rpc error: code = NotFound desc = could not find container \"2ebc24bce434d92caf3b6dddff5372892b467aaf5d08b439432b75f8634881e6\": container with ID starting with 2ebc24bce434d92caf3b6dddff5372892b467aaf5d08b439432b75f8634881e6 not found: ID does not exist" Dec 13 04:40:48 crc kubenswrapper[5070]: I1213 04:40:48.034314 5070 scope.go:117] "RemoveContainer" containerID="2524b49c159aca3f5a34d5e1c816195090f29f64863941ca09993f35f238038e" Dec 13 04:40:48 crc kubenswrapper[5070]: E1213 04:40:48.034618 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2524b49c159aca3f5a34d5e1c816195090f29f64863941ca09993f35f238038e\": container with ID starting with 2524b49c159aca3f5a34d5e1c816195090f29f64863941ca09993f35f238038e not found: ID does not exist" containerID="2524b49c159aca3f5a34d5e1c816195090f29f64863941ca09993f35f238038e" Dec 13 04:40:48 crc kubenswrapper[5070]: I1213 04:40:48.034653 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2524b49c159aca3f5a34d5e1c816195090f29f64863941ca09993f35f238038e"} err="failed to get container status \"2524b49c159aca3f5a34d5e1c816195090f29f64863941ca09993f35f238038e\": rpc error: code = NotFound desc = could not find container \"2524b49c159aca3f5a34d5e1c816195090f29f64863941ca09993f35f238038e\": container with ID starting with 2524b49c159aca3f5a34d5e1c816195090f29f64863941ca09993f35f238038e not found: ID does not exist" Dec 13 04:40:48 crc kubenswrapper[5070]: I1213 04:40:48.186131 5070 scope.go:117] "RemoveContainer" containerID="4dd2d3fc2cd1ced75e69bd95c0ae138ae20b733a2f44ca40234bada3b2e3e2d0" Dec 13 04:40:48 crc kubenswrapper[5070]: E1213 04:40:48.186769 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 04:40:48 crc kubenswrapper[5070]: I1213 04:40:48.189641 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b769b1e7-7490-4be2-984b-958a593082b2" path="/var/lib/kubelet/pods/b769b1e7-7490-4be2-984b-958a593082b2/volumes" Dec 13 04:41:00 crc kubenswrapper[5070]: I1213 04:41:00.167514 5070 scope.go:117] "RemoveContainer" containerID="4dd2d3fc2cd1ced75e69bd95c0ae138ae20b733a2f44ca40234bada3b2e3e2d0" 
Dec 13 04:41:00 crc kubenswrapper[5070]: E1213 04:41:00.168559 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 04:41:13 crc kubenswrapper[5070]: I1213 04:41:13.167501 5070 scope.go:117] "RemoveContainer" containerID="4dd2d3fc2cd1ced75e69bd95c0ae138ae20b733a2f44ca40234bada3b2e3e2d0" Dec 13 04:41:13 crc kubenswrapper[5070]: E1213 04:41:13.168548 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 04:41:27 crc kubenswrapper[5070]: I1213 04:41:27.166713 5070 scope.go:117] "RemoveContainer" containerID="4dd2d3fc2cd1ced75e69bd95c0ae138ae20b733a2f44ca40234bada3b2e3e2d0" Dec 13 04:41:27 crc kubenswrapper[5070]: E1213 04:41:27.167416 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 04:41:32 crc kubenswrapper[5070]: I1213 04:41:32.403333 5070 generic.go:334] "Generic (PLEG): container finished" podID="178ec5fe-9225-4e5a-ac31-fe1ee49042a5" containerID="c0f8ac805ded9e54a0acf1e095b78ffde8fdb7fdbcaa1076ef8b6aae8f085456" exitCode=0 Dec 13 04:41:32 crc kubenswrapper[5070]: I1213 04:41:32.403520 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-vqfsl/must-gather-22p7k" event={"ID":"178ec5fe-9225-4e5a-ac31-fe1ee49042a5","Type":"ContainerDied","Data":"c0f8ac805ded9e54a0acf1e095b78ffde8fdb7fdbcaa1076ef8b6aae8f085456"} Dec 13 04:41:32 crc kubenswrapper[5070]: I1213 04:41:32.404423 5070 scope.go:117] "RemoveContainer" containerID="c0f8ac805ded9e54a0acf1e095b78ffde8fdb7fdbcaa1076ef8b6aae8f085456" Dec 13 04:41:33 crc kubenswrapper[5070]: I1213 04:41:33.316245 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-vqfsl_must-gather-22p7k_178ec5fe-9225-4e5a-ac31-fe1ee49042a5/gather/0.log" Dec 13 04:41:40 crc kubenswrapper[5070]: I1213 04:41:40.168188 5070 scope.go:117] "RemoveContainer" containerID="4dd2d3fc2cd1ced75e69bd95c0ae138ae20b733a2f44ca40234bada3b2e3e2d0" Dec 13 04:41:40 crc kubenswrapper[5070]: E1213 04:41:40.168668 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 04:41:40 crc kubenswrapper[5070]: I1213 04:41:40.996483 5070 
kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-vqfsl/must-gather-22p7k"] Dec 13 04:41:40 crc kubenswrapper[5070]: I1213 04:41:40.996897 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-vqfsl/must-gather-22p7k" podUID="178ec5fe-9225-4e5a-ac31-fe1ee49042a5" containerName="copy" containerID="cri-o://37a9c6fe72feb0613e9c4a0fe8cd6a0846b9627b222fcb9c8a5b104ef17e6a0d" gracePeriod=2 Dec 13 04:41:41 crc kubenswrapper[5070]: I1213 04:41:41.005911 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-vqfsl/must-gather-22p7k"] Dec 13 04:41:41 crc kubenswrapper[5070]: I1213 04:41:41.467034 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-vqfsl_must-gather-22p7k_178ec5fe-9225-4e5a-ac31-fe1ee49042a5/copy/0.log" Dec 13 04:41:41 crc kubenswrapper[5070]: I1213 04:41:41.468472 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-vqfsl/must-gather-22p7k" Dec 13 04:41:41 crc kubenswrapper[5070]: I1213 04:41:41.501361 5070 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-vqfsl_must-gather-22p7k_178ec5fe-9225-4e5a-ac31-fe1ee49042a5/copy/0.log" Dec 13 04:41:41 crc kubenswrapper[5070]: I1213 04:41:41.501763 5070 generic.go:334] "Generic (PLEG): container finished" podID="178ec5fe-9225-4e5a-ac31-fe1ee49042a5" containerID="37a9c6fe72feb0613e9c4a0fe8cd6a0846b9627b222fcb9c8a5b104ef17e6a0d" exitCode=143 Dec 13 04:41:41 crc kubenswrapper[5070]: I1213 04:41:41.501803 5070 scope.go:117] "RemoveContainer" containerID="37a9c6fe72feb0613e9c4a0fe8cd6a0846b9627b222fcb9c8a5b104ef17e6a0d" Dec 13 04:41:41 crc kubenswrapper[5070]: I1213 04:41:41.501910 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-vqfsl/must-gather-22p7k" Dec 13 04:41:41 crc kubenswrapper[5070]: I1213 04:41:41.525847 5070 scope.go:117] "RemoveContainer" containerID="c0f8ac805ded9e54a0acf1e095b78ffde8fdb7fdbcaa1076ef8b6aae8f085456" Dec 13 04:41:41 crc kubenswrapper[5070]: I1213 04:41:41.590907 5070 scope.go:117] "RemoveContainer" containerID="37a9c6fe72feb0613e9c4a0fe8cd6a0846b9627b222fcb9c8a5b104ef17e6a0d" Dec 13 04:41:41 crc kubenswrapper[5070]: E1213 04:41:41.591799 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"37a9c6fe72feb0613e9c4a0fe8cd6a0846b9627b222fcb9c8a5b104ef17e6a0d\": container with ID starting with 37a9c6fe72feb0613e9c4a0fe8cd6a0846b9627b222fcb9c8a5b104ef17e6a0d not found: ID does not exist" containerID="37a9c6fe72feb0613e9c4a0fe8cd6a0846b9627b222fcb9c8a5b104ef17e6a0d" Dec 13 04:41:41 crc kubenswrapper[5070]: I1213 04:41:41.591844 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"37a9c6fe72feb0613e9c4a0fe8cd6a0846b9627b222fcb9c8a5b104ef17e6a0d"} err="failed to get container status \"37a9c6fe72feb0613e9c4a0fe8cd6a0846b9627b222fcb9c8a5b104ef17e6a0d\": rpc error: code = NotFound desc = could not find container \"37a9c6fe72feb0613e9c4a0fe8cd6a0846b9627b222fcb9c8a5b104ef17e6a0d\": container with ID starting with 37a9c6fe72feb0613e9c4a0fe8cd6a0846b9627b222fcb9c8a5b104ef17e6a0d not found: ID does not exist" Dec 13 04:41:41 crc kubenswrapper[5070]: I1213 04:41:41.591872 5070 scope.go:117] "RemoveContainer" containerID="c0f8ac805ded9e54a0acf1e095b78ffde8fdb7fdbcaa1076ef8b6aae8f085456" Dec 13 04:41:41 crc kubenswrapper[5070]: E1213 04:41:41.592884 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c0f8ac805ded9e54a0acf1e095b78ffde8fdb7fdbcaa1076ef8b6aae8f085456\": container with ID starting with c0f8ac805ded9e54a0acf1e095b78ffde8fdb7fdbcaa1076ef8b6aae8f085456 not found: ID does not exist" containerID="c0f8ac805ded9e54a0acf1e095b78ffde8fdb7fdbcaa1076ef8b6aae8f085456" Dec 13 04:41:41 crc kubenswrapper[5070]: I1213 04:41:41.592929 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c0f8ac805ded9e54a0acf1e095b78ffde8fdb7fdbcaa1076ef8b6aae8f085456"} err="failed to get container status \"c0f8ac805ded9e54a0acf1e095b78ffde8fdb7fdbcaa1076ef8b6aae8f085456\": rpc error: code = NotFound desc = could not find container \"c0f8ac805ded9e54a0acf1e095b78ffde8fdb7fdbcaa1076ef8b6aae8f085456\": container with ID starting with c0f8ac805ded9e54a0acf1e095b78ffde8fdb7fdbcaa1076ef8b6aae8f085456 not found: ID does not exist" Dec 13 04:41:41 crc kubenswrapper[5070]: I1213 04:41:41.627671 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hp9wd\" (UniqueName: \"kubernetes.io/projected/178ec5fe-9225-4e5a-ac31-fe1ee49042a5-kube-api-access-hp9wd\") pod \"178ec5fe-9225-4e5a-ac31-fe1ee49042a5\" (UID: \"178ec5fe-9225-4e5a-ac31-fe1ee49042a5\") " Dec 13 04:41:41 crc kubenswrapper[5070]: I1213 04:41:41.627746 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/178ec5fe-9225-4e5a-ac31-fe1ee49042a5-must-gather-output\") pod \"178ec5fe-9225-4e5a-ac31-fe1ee49042a5\" (UID: \"178ec5fe-9225-4e5a-ac31-fe1ee49042a5\") " Dec 13 04:41:41 crc kubenswrapper[5070]: I1213 04:41:41.641130 5070 
operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/178ec5fe-9225-4e5a-ac31-fe1ee49042a5-kube-api-access-hp9wd" (OuterVolumeSpecName: "kube-api-access-hp9wd") pod "178ec5fe-9225-4e5a-ac31-fe1ee49042a5" (UID: "178ec5fe-9225-4e5a-ac31-fe1ee49042a5"). InnerVolumeSpecName "kube-api-access-hp9wd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 04:41:41 crc kubenswrapper[5070]: I1213 04:41:41.729431 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hp9wd\" (UniqueName: \"kubernetes.io/projected/178ec5fe-9225-4e5a-ac31-fe1ee49042a5-kube-api-access-hp9wd\") on node \"crc\" DevicePath \"\"" Dec 13 04:41:41 crc kubenswrapper[5070]: I1213 04:41:41.790639 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/178ec5fe-9225-4e5a-ac31-fe1ee49042a5-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "178ec5fe-9225-4e5a-ac31-fe1ee49042a5" (UID: "178ec5fe-9225-4e5a-ac31-fe1ee49042a5"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 04:41:41 crc kubenswrapper[5070]: I1213 04:41:41.830963 5070 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/178ec5fe-9225-4e5a-ac31-fe1ee49042a5-must-gather-output\") on node \"crc\" DevicePath \"\"" Dec 13 04:41:42 crc kubenswrapper[5070]: I1213 04:41:42.176972 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="178ec5fe-9225-4e5a-ac31-fe1ee49042a5" path="/var/lib/kubelet/pods/178ec5fe-9225-4e5a-ac31-fe1ee49042a5/volumes" Dec 13 04:41:53 crc kubenswrapper[5070]: I1213 04:41:53.167407 5070 scope.go:117] "RemoveContainer" containerID="4dd2d3fc2cd1ced75e69bd95c0ae138ae20b733a2f44ca40234bada3b2e3e2d0" Dec 13 04:41:53 crc kubenswrapper[5070]: E1213 04:41:53.168132 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 04:42:07 crc kubenswrapper[5070]: I1213 04:42:07.167818 5070 scope.go:117] "RemoveContainer" containerID="4dd2d3fc2cd1ced75e69bd95c0ae138ae20b733a2f44ca40234bada3b2e3e2d0" Dec 13 04:42:07 crc kubenswrapper[5070]: E1213 04:42:07.169165 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 04:42:20 crc kubenswrapper[5070]: I1213 04:42:20.166853 5070 scope.go:117] "RemoveContainer" containerID="4dd2d3fc2cd1ced75e69bd95c0ae138ae20b733a2f44ca40234bada3b2e3e2d0" Dec 13 04:42:20 crc kubenswrapper[5070]: E1213 04:42:20.167797 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 04:42:33 crc kubenswrapper[5070]: I1213 04:42:33.167199 5070 scope.go:117] "RemoveContainer" containerID="4dd2d3fc2cd1ced75e69bd95c0ae138ae20b733a2f44ca40234bada3b2e3e2d0" Dec 13 04:42:33 crc kubenswrapper[5070]: E1213 04:42:33.168230 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 04:42:33 crc kubenswrapper[5070]: I1213 04:42:33.282124 5070 scope.go:117] "RemoveContainer" containerID="0aac63537f4cba329593210eb44f77b76995fdd519be63701c809c814266788d" Dec 13 04:42:45 crc kubenswrapper[5070]: I1213 04:42:45.167110 5070 scope.go:117] "RemoveContainer" containerID="4dd2d3fc2cd1ced75e69bd95c0ae138ae20b733a2f44ca40234bada3b2e3e2d0" Dec 13 04:42:45 crc kubenswrapper[5070]: E1213 04:42:45.168018 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 04:42:56 crc kubenswrapper[5070]: I1213 04:42:56.189270 5070 scope.go:117] "RemoveContainer" containerID="4dd2d3fc2cd1ced75e69bd95c0ae138ae20b733a2f44ca40234bada3b2e3e2d0" Dec 13 04:42:56 crc kubenswrapper[5070]: E1213 04:42:56.190260 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 04:43:08 crc kubenswrapper[5070]: I1213 04:43:08.177139 5070 scope.go:117] "RemoveContainer" containerID="4dd2d3fc2cd1ced75e69bd95c0ae138ae20b733a2f44ca40234bada3b2e3e2d0" Dec 13 04:43:08 crc kubenswrapper[5070]: E1213 04:43:08.178036 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 04:43:20 crc kubenswrapper[5070]: I1213 04:43:20.167595 5070 scope.go:117] "RemoveContainer" containerID="4dd2d3fc2cd1ced75e69bd95c0ae138ae20b733a2f44ca40234bada3b2e3e2d0" Dec 13 04:43:20 crc kubenswrapper[5070]: E1213 04:43:20.170711 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 04:43:33 crc kubenswrapper[5070]: I1213 04:43:33.167421 5070 scope.go:117] "RemoveContainer" containerID="4dd2d3fc2cd1ced75e69bd95c0ae138ae20b733a2f44ca40234bada3b2e3e2d0" Dec 13 04:43:33 crc kubenswrapper[5070]: E1213 04:43:33.168661 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 04:43:33 crc kubenswrapper[5070]: I1213 04:43:33.363781 5070 scope.go:117] "RemoveContainer" containerID="a50f0ec15f093810b3c459975ae88ae507b869884b68929a909c26de62d9bd82" Dec 13 04:43:44 crc kubenswrapper[5070]: I1213 04:43:44.167462 5070 scope.go:117] "RemoveContainer" containerID="4dd2d3fc2cd1ced75e69bd95c0ae138ae20b733a2f44ca40234bada3b2e3e2d0" Dec 13 04:43:44 crc kubenswrapper[5070]: E1213 04:43:44.168134 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 04:43:56 crc kubenswrapper[5070]: I1213 04:43:56.173624 5070 scope.go:117] "RemoveContainer" containerID="4dd2d3fc2cd1ced75e69bd95c0ae138ae20b733a2f44ca40234bada3b2e3e2d0" Dec 13 04:43:56 crc kubenswrapper[5070]: E1213 04:43:56.199390 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 04:44:10 crc kubenswrapper[5070]: I1213 04:44:10.167305 5070 scope.go:117] "RemoveContainer" containerID="4dd2d3fc2cd1ced75e69bd95c0ae138ae20b733a2f44ca40234bada3b2e3e2d0" Dec 13 04:44:10 crc kubenswrapper[5070]: E1213 04:44:10.168305 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 04:44:17 crc kubenswrapper[5070]: I1213 04:44:17.713684 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-dzps6"] Dec 13 04:44:17 crc kubenswrapper[5070]: E1213 04:44:17.714713 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="178ec5fe-9225-4e5a-ac31-fe1ee49042a5" containerName="copy" Dec 13 04:44:17 crc kubenswrapper[5070]: I1213 04:44:17.714727 5070 state_mem.go:107] "Deleted CPUSet 
assignment" podUID="178ec5fe-9225-4e5a-ac31-fe1ee49042a5" containerName="copy" Dec 13 04:44:17 crc kubenswrapper[5070]: E1213 04:44:17.714742 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b769b1e7-7490-4be2-984b-958a593082b2" containerName="registry-server" Dec 13 04:44:17 crc kubenswrapper[5070]: I1213 04:44:17.714749 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="b769b1e7-7490-4be2-984b-958a593082b2" containerName="registry-server" Dec 13 04:44:17 crc kubenswrapper[5070]: E1213 04:44:17.714785 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b769b1e7-7490-4be2-984b-958a593082b2" containerName="extract-utilities" Dec 13 04:44:17 crc kubenswrapper[5070]: I1213 04:44:17.714793 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="b769b1e7-7490-4be2-984b-958a593082b2" containerName="extract-utilities" Dec 13 04:44:17 crc kubenswrapper[5070]: E1213 04:44:17.714816 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="178ec5fe-9225-4e5a-ac31-fe1ee49042a5" containerName="gather" Dec 13 04:44:17 crc kubenswrapper[5070]: I1213 04:44:17.714822 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="178ec5fe-9225-4e5a-ac31-fe1ee49042a5" containerName="gather" Dec 13 04:44:17 crc kubenswrapper[5070]: E1213 04:44:17.714833 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b769b1e7-7490-4be2-984b-958a593082b2" containerName="extract-content" Dec 13 04:44:17 crc kubenswrapper[5070]: I1213 04:44:17.714839 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="b769b1e7-7490-4be2-984b-958a593082b2" containerName="extract-content" Dec 13 04:44:17 crc kubenswrapper[5070]: I1213 04:44:17.715049 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="178ec5fe-9225-4e5a-ac31-fe1ee49042a5" containerName="gather" Dec 13 04:44:17 crc kubenswrapper[5070]: I1213 04:44:17.715062 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="b769b1e7-7490-4be2-984b-958a593082b2" containerName="registry-server" Dec 13 04:44:17 crc kubenswrapper[5070]: I1213 04:44:17.715084 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="178ec5fe-9225-4e5a-ac31-fe1ee49042a5" containerName="copy" Dec 13 04:44:17 crc kubenswrapper[5070]: I1213 04:44:17.716663 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-dzps6" Dec 13 04:44:17 crc kubenswrapper[5070]: I1213 04:44:17.743690 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-dzps6"] Dec 13 04:44:17 crc kubenswrapper[5070]: I1213 04:44:17.801868 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4fbe371d-e68c-4983-b276-0e66ba3cdfe6-catalog-content\") pod \"certified-operators-dzps6\" (UID: \"4fbe371d-e68c-4983-b276-0e66ba3cdfe6\") " pod="openshift-marketplace/certified-operators-dzps6" Dec 13 04:44:17 crc kubenswrapper[5070]: I1213 04:44:17.802083 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4fbe371d-e68c-4983-b276-0e66ba3cdfe6-utilities\") pod \"certified-operators-dzps6\" (UID: \"4fbe371d-e68c-4983-b276-0e66ba3cdfe6\") " pod="openshift-marketplace/certified-operators-dzps6" Dec 13 04:44:17 crc kubenswrapper[5070]: I1213 04:44:17.802264 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9zzvt\" (UniqueName: \"kubernetes.io/projected/4fbe371d-e68c-4983-b276-0e66ba3cdfe6-kube-api-access-9zzvt\") pod \"certified-operators-dzps6\" (UID: \"4fbe371d-e68c-4983-b276-0e66ba3cdfe6\") " pod="openshift-marketplace/certified-operators-dzps6" Dec 13 04:44:17 crc kubenswrapper[5070]: I1213 04:44:17.904603 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9zzvt\" (UniqueName: \"kubernetes.io/projected/4fbe371d-e68c-4983-b276-0e66ba3cdfe6-kube-api-access-9zzvt\") pod \"certified-operators-dzps6\" (UID: \"4fbe371d-e68c-4983-b276-0e66ba3cdfe6\") " pod="openshift-marketplace/certified-operators-dzps6" Dec 13 04:44:17 crc kubenswrapper[5070]: I1213 04:44:17.904815 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4fbe371d-e68c-4983-b276-0e66ba3cdfe6-catalog-content\") pod \"certified-operators-dzps6\" (UID: \"4fbe371d-e68c-4983-b276-0e66ba3cdfe6\") " pod="openshift-marketplace/certified-operators-dzps6" Dec 13 04:44:17 crc kubenswrapper[5070]: I1213 04:44:17.904928 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4fbe371d-e68c-4983-b276-0e66ba3cdfe6-utilities\") pod \"certified-operators-dzps6\" (UID: \"4fbe371d-e68c-4983-b276-0e66ba3cdfe6\") " pod="openshift-marketplace/certified-operators-dzps6" Dec 13 04:44:17 crc kubenswrapper[5070]: I1213 04:44:17.905434 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4fbe371d-e68c-4983-b276-0e66ba3cdfe6-catalog-content\") pod \"certified-operators-dzps6\" (UID: \"4fbe371d-e68c-4983-b276-0e66ba3cdfe6\") " pod="openshift-marketplace/certified-operators-dzps6" Dec 13 04:44:17 crc kubenswrapper[5070]: I1213 04:44:17.905533 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4fbe371d-e68c-4983-b276-0e66ba3cdfe6-utilities\") pod \"certified-operators-dzps6\" (UID: \"4fbe371d-e68c-4983-b276-0e66ba3cdfe6\") " pod="openshift-marketplace/certified-operators-dzps6" Dec 13 04:44:17 crc kubenswrapper[5070]: I1213 04:44:17.925726 5070 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-9zzvt\" (UniqueName: \"kubernetes.io/projected/4fbe371d-e68c-4983-b276-0e66ba3cdfe6-kube-api-access-9zzvt\") pod \"certified-operators-dzps6\" (UID: \"4fbe371d-e68c-4983-b276-0e66ba3cdfe6\") " pod="openshift-marketplace/certified-operators-dzps6" Dec 13 04:44:18 crc kubenswrapper[5070]: I1213 04:44:18.045809 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-dzps6" Dec 13 04:44:18 crc kubenswrapper[5070]: I1213 04:44:18.546741 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-dzps6"] Dec 13 04:44:19 crc kubenswrapper[5070]: I1213 04:44:19.366269 5070 generic.go:334] "Generic (PLEG): container finished" podID="4fbe371d-e68c-4983-b276-0e66ba3cdfe6" containerID="ae066a816fc12c003e83c0604eec9034643d039e84972e715fdc2c961ddc591c" exitCode=0 Dec 13 04:44:19 crc kubenswrapper[5070]: I1213 04:44:19.366413 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dzps6" event={"ID":"4fbe371d-e68c-4983-b276-0e66ba3cdfe6","Type":"ContainerDied","Data":"ae066a816fc12c003e83c0604eec9034643d039e84972e715fdc2c961ddc591c"} Dec 13 04:44:19 crc kubenswrapper[5070]: I1213 04:44:19.366761 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dzps6" event={"ID":"4fbe371d-e68c-4983-b276-0e66ba3cdfe6","Type":"ContainerStarted","Data":"88f99c8fbe6916d4e1f91a3737a747091ecae608065ec3615327de5e84d3e073"} Dec 13 04:44:20 crc kubenswrapper[5070]: I1213 04:44:20.379628 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dzps6" event={"ID":"4fbe371d-e68c-4983-b276-0e66ba3cdfe6","Type":"ContainerStarted","Data":"8f412a0fda51c0a36e51b33389b60dfb9bf20f6d94a6897ce6902b69e8ccb1e1"} Dec 13 04:44:21 crc kubenswrapper[5070]: I1213 04:44:21.393790 5070 generic.go:334] "Generic (PLEG): container finished" podID="4fbe371d-e68c-4983-b276-0e66ba3cdfe6" containerID="8f412a0fda51c0a36e51b33389b60dfb9bf20f6d94a6897ce6902b69e8ccb1e1" exitCode=0 Dec 13 04:44:21 crc kubenswrapper[5070]: I1213 04:44:21.393887 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dzps6" event={"ID":"4fbe371d-e68c-4983-b276-0e66ba3cdfe6","Type":"ContainerDied","Data":"8f412a0fda51c0a36e51b33389b60dfb9bf20f6d94a6897ce6902b69e8ccb1e1"} Dec 13 04:44:22 crc kubenswrapper[5070]: I1213 04:44:22.166715 5070 scope.go:117] "RemoveContainer" containerID="4dd2d3fc2cd1ced75e69bd95c0ae138ae20b733a2f44ca40234bada3b2e3e2d0" Dec 13 04:44:22 crc kubenswrapper[5070]: E1213 04:44:22.167343 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 04:44:22 crc kubenswrapper[5070]: I1213 04:44:22.407205 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dzps6" event={"ID":"4fbe371d-e68c-4983-b276-0e66ba3cdfe6","Type":"ContainerStarted","Data":"edc0b33704fff907e97cced4d1c96e11a443871dd657d870f3e371a07d8c672f"} Dec 13 04:44:22 crc kubenswrapper[5070]: I1213 04:44:22.424879 5070 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-dzps6" podStartSLOduration=2.903011528 podStartE2EDuration="5.424856424s" podCreationTimestamp="2025-12-13 04:44:17 +0000 UTC" firstStartedPulling="2025-12-13 04:44:19.370267534 +0000 UTC m=+5551.606111090" lastFinishedPulling="2025-12-13 04:44:21.89211244 +0000 UTC m=+5554.127955986" observedRunningTime="2025-12-13 04:44:22.423771824 +0000 UTC m=+5554.659615380" watchObservedRunningTime="2025-12-13 04:44:22.424856424 +0000 UTC m=+5554.660699990" Dec 13 04:44:28 crc kubenswrapper[5070]: I1213 04:44:28.046240 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-dzps6" Dec 13 04:44:28 crc kubenswrapper[5070]: I1213 04:44:28.046989 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-dzps6" Dec 13 04:44:28 crc kubenswrapper[5070]: I1213 04:44:28.097500 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-dzps6" Dec 13 04:44:28 crc kubenswrapper[5070]: I1213 04:44:28.553671 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-dzps6" Dec 13 04:44:28 crc kubenswrapper[5070]: I1213 04:44:28.626788 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-dzps6"] Dec 13 04:44:30 crc kubenswrapper[5070]: I1213 04:44:30.521809 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-dzps6" podUID="4fbe371d-e68c-4983-b276-0e66ba3cdfe6" containerName="registry-server" containerID="cri-o://edc0b33704fff907e97cced4d1c96e11a443871dd657d870f3e371a07d8c672f" gracePeriod=2 Dec 13 04:44:30 crc kubenswrapper[5070]: E1213 04:44:30.606336 5070 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4fbe371d_e68c_4983_b276_0e66ba3cdfe6.slice/crio-edc0b33704fff907e97cced4d1c96e11a443871dd657d870f3e371a07d8c672f.scope\": RecentStats: unable to find data in memory cache]" Dec 13 04:44:30 crc kubenswrapper[5070]: I1213 04:44:30.947949 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-dzps6" Dec 13 04:44:31 crc kubenswrapper[5070]: I1213 04:44:31.081266 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4fbe371d-e68c-4983-b276-0e66ba3cdfe6-catalog-content\") pod \"4fbe371d-e68c-4983-b276-0e66ba3cdfe6\" (UID: \"4fbe371d-e68c-4983-b276-0e66ba3cdfe6\") " Dec 13 04:44:31 crc kubenswrapper[5070]: I1213 04:44:31.081359 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4fbe371d-e68c-4983-b276-0e66ba3cdfe6-utilities\") pod \"4fbe371d-e68c-4983-b276-0e66ba3cdfe6\" (UID: \"4fbe371d-e68c-4983-b276-0e66ba3cdfe6\") " Dec 13 04:44:31 crc kubenswrapper[5070]: I1213 04:44:31.081562 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9zzvt\" (UniqueName: \"kubernetes.io/projected/4fbe371d-e68c-4983-b276-0e66ba3cdfe6-kube-api-access-9zzvt\") pod \"4fbe371d-e68c-4983-b276-0e66ba3cdfe6\" (UID: \"4fbe371d-e68c-4983-b276-0e66ba3cdfe6\") " Dec 13 04:44:31 crc kubenswrapper[5070]: I1213 04:44:31.083496 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4fbe371d-e68c-4983-b276-0e66ba3cdfe6-utilities" (OuterVolumeSpecName: "utilities") pod "4fbe371d-e68c-4983-b276-0e66ba3cdfe6" (UID: "4fbe371d-e68c-4983-b276-0e66ba3cdfe6"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 04:44:31 crc kubenswrapper[5070]: I1213 04:44:31.092234 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4fbe371d-e68c-4983-b276-0e66ba3cdfe6-kube-api-access-9zzvt" (OuterVolumeSpecName: "kube-api-access-9zzvt") pod "4fbe371d-e68c-4983-b276-0e66ba3cdfe6" (UID: "4fbe371d-e68c-4983-b276-0e66ba3cdfe6"). InnerVolumeSpecName "kube-api-access-9zzvt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 04:44:31 crc kubenswrapper[5070]: I1213 04:44:31.141913 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4fbe371d-e68c-4983-b276-0e66ba3cdfe6-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4fbe371d-e68c-4983-b276-0e66ba3cdfe6" (UID: "4fbe371d-e68c-4983-b276-0e66ba3cdfe6"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 04:44:31 crc kubenswrapper[5070]: I1213 04:44:31.185032 5070 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4fbe371d-e68c-4983-b276-0e66ba3cdfe6-utilities\") on node \"crc\" DevicePath \"\"" Dec 13 04:44:31 crc kubenswrapper[5070]: I1213 04:44:31.185096 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9zzvt\" (UniqueName: \"kubernetes.io/projected/4fbe371d-e68c-4983-b276-0e66ba3cdfe6-kube-api-access-9zzvt\") on node \"crc\" DevicePath \"\"" Dec 13 04:44:31 crc kubenswrapper[5070]: I1213 04:44:31.185116 5070 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4fbe371d-e68c-4983-b276-0e66ba3cdfe6-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 13 04:44:31 crc kubenswrapper[5070]: I1213 04:44:31.531183 5070 generic.go:334] "Generic (PLEG): container finished" podID="4fbe371d-e68c-4983-b276-0e66ba3cdfe6" containerID="edc0b33704fff907e97cced4d1c96e11a443871dd657d870f3e371a07d8c672f" exitCode=0 Dec 13 04:44:31 crc kubenswrapper[5070]: I1213 04:44:31.531232 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dzps6" event={"ID":"4fbe371d-e68c-4983-b276-0e66ba3cdfe6","Type":"ContainerDied","Data":"edc0b33704fff907e97cced4d1c96e11a443871dd657d870f3e371a07d8c672f"} Dec 13 04:44:31 crc kubenswrapper[5070]: I1213 04:44:31.531263 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dzps6" event={"ID":"4fbe371d-e68c-4983-b276-0e66ba3cdfe6","Type":"ContainerDied","Data":"88f99c8fbe6916d4e1f91a3737a747091ecae608065ec3615327de5e84d3e073"} Dec 13 04:44:31 crc kubenswrapper[5070]: I1213 04:44:31.531283 5070 scope.go:117] "RemoveContainer" containerID="edc0b33704fff907e97cced4d1c96e11a443871dd657d870f3e371a07d8c672f" Dec 13 04:44:31 crc kubenswrapper[5070]: I1213 04:44:31.531421 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-dzps6" Dec 13 04:44:31 crc kubenswrapper[5070]: I1213 04:44:31.563898 5070 scope.go:117] "RemoveContainer" containerID="8f412a0fda51c0a36e51b33389b60dfb9bf20f6d94a6897ce6902b69e8ccb1e1" Dec 13 04:44:31 crc kubenswrapper[5070]: I1213 04:44:31.572998 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-dzps6"] Dec 13 04:44:31 crc kubenswrapper[5070]: I1213 04:44:31.582535 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-dzps6"] Dec 13 04:44:31 crc kubenswrapper[5070]: I1213 04:44:31.602545 5070 scope.go:117] "RemoveContainer" containerID="ae066a816fc12c003e83c0604eec9034643d039e84972e715fdc2c961ddc591c" Dec 13 04:44:31 crc kubenswrapper[5070]: I1213 04:44:31.627842 5070 scope.go:117] "RemoveContainer" containerID="edc0b33704fff907e97cced4d1c96e11a443871dd657d870f3e371a07d8c672f" Dec 13 04:44:31 crc kubenswrapper[5070]: E1213 04:44:31.628853 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"edc0b33704fff907e97cced4d1c96e11a443871dd657d870f3e371a07d8c672f\": container with ID starting with edc0b33704fff907e97cced4d1c96e11a443871dd657d870f3e371a07d8c672f not found: ID does not exist" containerID="edc0b33704fff907e97cced4d1c96e11a443871dd657d870f3e371a07d8c672f" Dec 13 04:44:31 crc kubenswrapper[5070]: I1213 04:44:31.628916 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"edc0b33704fff907e97cced4d1c96e11a443871dd657d870f3e371a07d8c672f"} err="failed to get container status \"edc0b33704fff907e97cced4d1c96e11a443871dd657d870f3e371a07d8c672f\": rpc error: code = NotFound desc = could not find container \"edc0b33704fff907e97cced4d1c96e11a443871dd657d870f3e371a07d8c672f\": container with ID starting with edc0b33704fff907e97cced4d1c96e11a443871dd657d870f3e371a07d8c672f not found: ID does not exist" Dec 13 04:44:31 crc kubenswrapper[5070]: I1213 04:44:31.628943 5070 scope.go:117] "RemoveContainer" containerID="8f412a0fda51c0a36e51b33389b60dfb9bf20f6d94a6897ce6902b69e8ccb1e1" Dec 13 04:44:31 crc kubenswrapper[5070]: E1213 04:44:31.629284 5070 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8f412a0fda51c0a36e51b33389b60dfb9bf20f6d94a6897ce6902b69e8ccb1e1\": container with ID starting with 8f412a0fda51c0a36e51b33389b60dfb9bf20f6d94a6897ce6902b69e8ccb1e1 not found: ID does not exist" containerID="8f412a0fda51c0a36e51b33389b60dfb9bf20f6d94a6897ce6902b69e8ccb1e1" Dec 13 04:44:31 crc kubenswrapper[5070]: I1213 04:44:31.629326 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8f412a0fda51c0a36e51b33389b60dfb9bf20f6d94a6897ce6902b69e8ccb1e1"} err="failed to get container status \"8f412a0fda51c0a36e51b33389b60dfb9bf20f6d94a6897ce6902b69e8ccb1e1\": rpc error: code = NotFound desc = could not find container \"8f412a0fda51c0a36e51b33389b60dfb9bf20f6d94a6897ce6902b69e8ccb1e1\": container with ID starting with 8f412a0fda51c0a36e51b33389b60dfb9bf20f6d94a6897ce6902b69e8ccb1e1 not found: ID does not exist" Dec 13 04:44:31 crc kubenswrapper[5070]: I1213 04:44:31.629348 5070 scope.go:117] "RemoveContainer" containerID="ae066a816fc12c003e83c0604eec9034643d039e84972e715fdc2c961ddc591c" Dec 13 04:44:31 crc kubenswrapper[5070]: E1213 04:44:31.629587 5070 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"ae066a816fc12c003e83c0604eec9034643d039e84972e715fdc2c961ddc591c\": container with ID starting with ae066a816fc12c003e83c0604eec9034643d039e84972e715fdc2c961ddc591c not found: ID does not exist" containerID="ae066a816fc12c003e83c0604eec9034643d039e84972e715fdc2c961ddc591c" Dec 13 04:44:31 crc kubenswrapper[5070]: I1213 04:44:31.629632 5070 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ae066a816fc12c003e83c0604eec9034643d039e84972e715fdc2c961ddc591c"} err="failed to get container status \"ae066a816fc12c003e83c0604eec9034643d039e84972e715fdc2c961ddc591c\": rpc error: code = NotFound desc = could not find container \"ae066a816fc12c003e83c0604eec9034643d039e84972e715fdc2c961ddc591c\": container with ID starting with ae066a816fc12c003e83c0604eec9034643d039e84972e715fdc2c961ddc591c not found: ID does not exist" Dec 13 04:44:32 crc kubenswrapper[5070]: I1213 04:44:32.178854 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4fbe371d-e68c-4983-b276-0e66ba3cdfe6" path="/var/lib/kubelet/pods/4fbe371d-e68c-4983-b276-0e66ba3cdfe6/volumes" Dec 13 04:44:35 crc kubenswrapper[5070]: I1213 04:44:35.166838 5070 scope.go:117] "RemoveContainer" containerID="4dd2d3fc2cd1ced75e69bd95c0ae138ae20b733a2f44ca40234bada3b2e3e2d0" Dec 13 04:44:35 crc kubenswrapper[5070]: E1213 04:44:35.167546 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 04:44:50 crc kubenswrapper[5070]: I1213 04:44:50.167495 5070 scope.go:117] "RemoveContainer" containerID="4dd2d3fc2cd1ced75e69bd95c0ae138ae20b733a2f44ca40234bada3b2e3e2d0" Dec 13 04:44:50 crc kubenswrapper[5070]: E1213 04:44:50.168369 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 04:45:00 crc kubenswrapper[5070]: I1213 04:45:00.161638 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29426685-2h7x6"] Dec 13 04:45:00 crc kubenswrapper[5070]: E1213 04:45:00.162744 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4fbe371d-e68c-4983-b276-0e66ba3cdfe6" containerName="extract-content" Dec 13 04:45:00 crc kubenswrapper[5070]: I1213 04:45:00.162765 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="4fbe371d-e68c-4983-b276-0e66ba3cdfe6" containerName="extract-content" Dec 13 04:45:00 crc kubenswrapper[5070]: E1213 04:45:00.162797 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4fbe371d-e68c-4983-b276-0e66ba3cdfe6" containerName="extract-utilities" Dec 13 04:45:00 crc kubenswrapper[5070]: I1213 04:45:00.162807 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="4fbe371d-e68c-4983-b276-0e66ba3cdfe6" containerName="extract-utilities" Dec 13 04:45:00 crc 
kubenswrapper[5070]: E1213 04:45:00.162819 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4fbe371d-e68c-4983-b276-0e66ba3cdfe6" containerName="registry-server" Dec 13 04:45:00 crc kubenswrapper[5070]: I1213 04:45:00.162827 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="4fbe371d-e68c-4983-b276-0e66ba3cdfe6" containerName="registry-server" Dec 13 04:45:00 crc kubenswrapper[5070]: I1213 04:45:00.163063 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="4fbe371d-e68c-4983-b276-0e66ba3cdfe6" containerName="registry-server" Dec 13 04:45:00 crc kubenswrapper[5070]: I1213 04:45:00.163881 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29426685-2h7x6" Dec 13 04:45:00 crc kubenswrapper[5070]: I1213 04:45:00.168366 5070 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 13 04:45:00 crc kubenswrapper[5070]: I1213 04:45:00.168388 5070 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 13 04:45:00 crc kubenswrapper[5070]: I1213 04:45:00.178221 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29426685-2h7x6"] Dec 13 04:45:00 crc kubenswrapper[5070]: I1213 04:45:00.314305 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4q7qt\" (UniqueName: \"kubernetes.io/projected/ebef6251-5224-40b6-bfc8-2c13cf603b5c-kube-api-access-4q7qt\") pod \"collect-profiles-29426685-2h7x6\" (UID: \"ebef6251-5224-40b6-bfc8-2c13cf603b5c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426685-2h7x6" Dec 13 04:45:00 crc kubenswrapper[5070]: I1213 04:45:00.314347 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ebef6251-5224-40b6-bfc8-2c13cf603b5c-config-volume\") pod \"collect-profiles-29426685-2h7x6\" (UID: \"ebef6251-5224-40b6-bfc8-2c13cf603b5c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426685-2h7x6" Dec 13 04:45:00 crc kubenswrapper[5070]: I1213 04:45:00.314400 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ebef6251-5224-40b6-bfc8-2c13cf603b5c-secret-volume\") pod \"collect-profiles-29426685-2h7x6\" (UID: \"ebef6251-5224-40b6-bfc8-2c13cf603b5c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426685-2h7x6" Dec 13 04:45:00 crc kubenswrapper[5070]: I1213 04:45:00.416898 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4q7qt\" (UniqueName: \"kubernetes.io/projected/ebef6251-5224-40b6-bfc8-2c13cf603b5c-kube-api-access-4q7qt\") pod \"collect-profiles-29426685-2h7x6\" (UID: \"ebef6251-5224-40b6-bfc8-2c13cf603b5c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426685-2h7x6" Dec 13 04:45:00 crc kubenswrapper[5070]: I1213 04:45:00.416976 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ebef6251-5224-40b6-bfc8-2c13cf603b5c-config-volume\") pod \"collect-profiles-29426685-2h7x6\" (UID: \"ebef6251-5224-40b6-bfc8-2c13cf603b5c\") " 
pod="openshift-operator-lifecycle-manager/collect-profiles-29426685-2h7x6" Dec 13 04:45:00 crc kubenswrapper[5070]: I1213 04:45:00.417057 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ebef6251-5224-40b6-bfc8-2c13cf603b5c-secret-volume\") pod \"collect-profiles-29426685-2h7x6\" (UID: \"ebef6251-5224-40b6-bfc8-2c13cf603b5c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426685-2h7x6" Dec 13 04:45:00 crc kubenswrapper[5070]: I1213 04:45:00.418057 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ebef6251-5224-40b6-bfc8-2c13cf603b5c-config-volume\") pod \"collect-profiles-29426685-2h7x6\" (UID: \"ebef6251-5224-40b6-bfc8-2c13cf603b5c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426685-2h7x6" Dec 13 04:45:00 crc kubenswrapper[5070]: I1213 04:45:00.435177 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ebef6251-5224-40b6-bfc8-2c13cf603b5c-secret-volume\") pod \"collect-profiles-29426685-2h7x6\" (UID: \"ebef6251-5224-40b6-bfc8-2c13cf603b5c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426685-2h7x6" Dec 13 04:45:00 crc kubenswrapper[5070]: I1213 04:45:00.436103 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4q7qt\" (UniqueName: \"kubernetes.io/projected/ebef6251-5224-40b6-bfc8-2c13cf603b5c-kube-api-access-4q7qt\") pod \"collect-profiles-29426685-2h7x6\" (UID: \"ebef6251-5224-40b6-bfc8-2c13cf603b5c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426685-2h7x6" Dec 13 04:45:00 crc kubenswrapper[5070]: I1213 04:45:00.487294 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29426685-2h7x6" Dec 13 04:45:00 crc kubenswrapper[5070]: I1213 04:45:00.965402 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29426685-2h7x6"] Dec 13 04:45:01 crc kubenswrapper[5070]: I1213 04:45:01.167924 5070 scope.go:117] "RemoveContainer" containerID="4dd2d3fc2cd1ced75e69bd95c0ae138ae20b733a2f44ca40234bada3b2e3e2d0" Dec 13 04:45:01 crc kubenswrapper[5070]: E1213 04:45:01.168156 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 04:45:01 crc kubenswrapper[5070]: I1213 04:45:01.813824 5070 generic.go:334] "Generic (PLEG): container finished" podID="ebef6251-5224-40b6-bfc8-2c13cf603b5c" containerID="396e815b194df0a99984062949fd36af180dfae07a53b32dcf640a339270c72e" exitCode=0 Dec 13 04:45:01 crc kubenswrapper[5070]: I1213 04:45:01.813901 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29426685-2h7x6" event={"ID":"ebef6251-5224-40b6-bfc8-2c13cf603b5c","Type":"ContainerDied","Data":"396e815b194df0a99984062949fd36af180dfae07a53b32dcf640a339270c72e"} Dec 13 04:45:01 crc kubenswrapper[5070]: I1213 04:45:01.814301 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29426685-2h7x6" event={"ID":"ebef6251-5224-40b6-bfc8-2c13cf603b5c","Type":"ContainerStarted","Data":"dc85897928bdf2aedd52ee437e25222fd2267a0007867153dc629bbc6511f4db"} Dec 13 04:45:03 crc kubenswrapper[5070]: I1213 04:45:03.128557 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29426685-2h7x6" Dec 13 04:45:03 crc kubenswrapper[5070]: I1213 04:45:03.281303 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4q7qt\" (UniqueName: \"kubernetes.io/projected/ebef6251-5224-40b6-bfc8-2c13cf603b5c-kube-api-access-4q7qt\") pod \"ebef6251-5224-40b6-bfc8-2c13cf603b5c\" (UID: \"ebef6251-5224-40b6-bfc8-2c13cf603b5c\") " Dec 13 04:45:03 crc kubenswrapper[5070]: I1213 04:45:03.281390 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ebef6251-5224-40b6-bfc8-2c13cf603b5c-config-volume\") pod \"ebef6251-5224-40b6-bfc8-2c13cf603b5c\" (UID: \"ebef6251-5224-40b6-bfc8-2c13cf603b5c\") " Dec 13 04:45:03 crc kubenswrapper[5070]: I1213 04:45:03.281495 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ebef6251-5224-40b6-bfc8-2c13cf603b5c-secret-volume\") pod \"ebef6251-5224-40b6-bfc8-2c13cf603b5c\" (UID: \"ebef6251-5224-40b6-bfc8-2c13cf603b5c\") " Dec 13 04:45:03 crc kubenswrapper[5070]: I1213 04:45:03.282789 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ebef6251-5224-40b6-bfc8-2c13cf603b5c-config-volume" (OuterVolumeSpecName: "config-volume") pod "ebef6251-5224-40b6-bfc8-2c13cf603b5c" (UID: "ebef6251-5224-40b6-bfc8-2c13cf603b5c"). 
InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 04:45:03 crc kubenswrapper[5070]: I1213 04:45:03.287431 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ebef6251-5224-40b6-bfc8-2c13cf603b5c-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "ebef6251-5224-40b6-bfc8-2c13cf603b5c" (UID: "ebef6251-5224-40b6-bfc8-2c13cf603b5c"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 04:45:03 crc kubenswrapper[5070]: I1213 04:45:03.288116 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ebef6251-5224-40b6-bfc8-2c13cf603b5c-kube-api-access-4q7qt" (OuterVolumeSpecName: "kube-api-access-4q7qt") pod "ebef6251-5224-40b6-bfc8-2c13cf603b5c" (UID: "ebef6251-5224-40b6-bfc8-2c13cf603b5c"). InnerVolumeSpecName "kube-api-access-4q7qt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 04:45:03 crc kubenswrapper[5070]: I1213 04:45:03.384317 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4q7qt\" (UniqueName: \"kubernetes.io/projected/ebef6251-5224-40b6-bfc8-2c13cf603b5c-kube-api-access-4q7qt\") on node \"crc\" DevicePath \"\"" Dec 13 04:45:03 crc kubenswrapper[5070]: I1213 04:45:03.384367 5070 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ebef6251-5224-40b6-bfc8-2c13cf603b5c-config-volume\") on node \"crc\" DevicePath \"\"" Dec 13 04:45:03 crc kubenswrapper[5070]: I1213 04:45:03.384380 5070 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ebef6251-5224-40b6-bfc8-2c13cf603b5c-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 13 04:45:03 crc kubenswrapper[5070]: I1213 04:45:03.832750 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29426685-2h7x6" event={"ID":"ebef6251-5224-40b6-bfc8-2c13cf603b5c","Type":"ContainerDied","Data":"dc85897928bdf2aedd52ee437e25222fd2267a0007867153dc629bbc6511f4db"} Dec 13 04:45:03 crc kubenswrapper[5070]: I1213 04:45:03.832791 5070 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dc85897928bdf2aedd52ee437e25222fd2267a0007867153dc629bbc6511f4db" Dec 13 04:45:03 crc kubenswrapper[5070]: I1213 04:45:03.832831 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29426685-2h7x6" Dec 13 04:45:04 crc kubenswrapper[5070]: I1213 04:45:04.200250 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29426640-wjr6p"] Dec 13 04:45:04 crc kubenswrapper[5070]: I1213 04:45:04.208962 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29426640-wjr6p"] Dec 13 04:45:06 crc kubenswrapper[5070]: I1213 04:45:06.179006 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="83d41711-02f5-4543-909b-75db2a8e7857" path="/var/lib/kubelet/pods/83d41711-02f5-4543-909b-75db2a8e7857/volumes" Dec 13 04:45:14 crc kubenswrapper[5070]: I1213 04:45:14.167274 5070 scope.go:117] "RemoveContainer" containerID="4dd2d3fc2cd1ced75e69bd95c0ae138ae20b733a2f44ca40234bada3b2e3e2d0" Dec 13 04:45:14 crc kubenswrapper[5070]: E1213 04:45:14.168052 5070 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9l4rb_openshift-machine-config-operator(a2e447c7-5901-414f-af96-69441d4750db)\"" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" podUID="a2e447c7-5901-414f-af96-69441d4750db" Dec 13 04:45:25 crc kubenswrapper[5070]: I1213 04:45:25.168263 5070 scope.go:117] "RemoveContainer" containerID="4dd2d3fc2cd1ced75e69bd95c0ae138ae20b733a2f44ca40234bada3b2e3e2d0" Dec 13 04:45:27 crc kubenswrapper[5070]: I1213 04:45:27.056420 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-9l4rb" event={"ID":"a2e447c7-5901-414f-af96-69441d4750db","Type":"ContainerStarted","Data":"4619b3f876679328091c6838d6b473461fc3841476139a0646c88efe6871f087"} Dec 13 04:45:33 crc kubenswrapper[5070]: I1213 04:45:33.459969 5070 scope.go:117] "RemoveContainer" containerID="4ef6548a2713ab68eeb2153ecfbf89fc53fa1d49af66d3c300204345cc84b784" Dec 13 04:47:14 crc kubenswrapper[5070]: I1213 04:47:14.135777 5070 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-fx8c6"] Dec 13 04:47:14 crc kubenswrapper[5070]: E1213 04:47:14.136805 5070 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ebef6251-5224-40b6-bfc8-2c13cf603b5c" containerName="collect-profiles" Dec 13 04:47:14 crc kubenswrapper[5070]: I1213 04:47:14.136824 5070 state_mem.go:107] "Deleted CPUSet assignment" podUID="ebef6251-5224-40b6-bfc8-2c13cf603b5c" containerName="collect-profiles" Dec 13 04:47:14 crc kubenswrapper[5070]: I1213 04:47:14.137138 5070 memory_manager.go:354] "RemoveStaleState removing state" podUID="ebef6251-5224-40b6-bfc8-2c13cf603b5c" containerName="collect-profiles" Dec 13 04:47:14 crc kubenswrapper[5070]: I1213 04:47:14.138889 5070 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fx8c6" Dec 13 04:47:14 crc kubenswrapper[5070]: I1213 04:47:14.145772 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-fx8c6"] Dec 13 04:47:14 crc kubenswrapper[5070]: I1213 04:47:14.159361 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7b76cfbe-4ea5-40be-a017-4a682eb51202-utilities\") pod \"redhat-marketplace-fx8c6\" (UID: \"7b76cfbe-4ea5-40be-a017-4a682eb51202\") " pod="openshift-marketplace/redhat-marketplace-fx8c6" Dec 13 04:47:14 crc kubenswrapper[5070]: I1213 04:47:14.159482 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7b76cfbe-4ea5-40be-a017-4a682eb51202-catalog-content\") pod \"redhat-marketplace-fx8c6\" (UID: \"7b76cfbe-4ea5-40be-a017-4a682eb51202\") " pod="openshift-marketplace/redhat-marketplace-fx8c6" Dec 13 04:47:14 crc kubenswrapper[5070]: I1213 04:47:14.166966 5070 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-np28z\" (UniqueName: \"kubernetes.io/projected/7b76cfbe-4ea5-40be-a017-4a682eb51202-kube-api-access-np28z\") pod \"redhat-marketplace-fx8c6\" (UID: \"7b76cfbe-4ea5-40be-a017-4a682eb51202\") " pod="openshift-marketplace/redhat-marketplace-fx8c6" Dec 13 04:47:14 crc kubenswrapper[5070]: I1213 04:47:14.267951 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-np28z\" (UniqueName: \"kubernetes.io/projected/7b76cfbe-4ea5-40be-a017-4a682eb51202-kube-api-access-np28z\") pod \"redhat-marketplace-fx8c6\" (UID: \"7b76cfbe-4ea5-40be-a017-4a682eb51202\") " pod="openshift-marketplace/redhat-marketplace-fx8c6" Dec 13 04:47:14 crc kubenswrapper[5070]: I1213 04:47:14.268373 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7b76cfbe-4ea5-40be-a017-4a682eb51202-utilities\") pod \"redhat-marketplace-fx8c6\" (UID: \"7b76cfbe-4ea5-40be-a017-4a682eb51202\") " pod="openshift-marketplace/redhat-marketplace-fx8c6" Dec 13 04:47:14 crc kubenswrapper[5070]: I1213 04:47:14.268474 5070 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7b76cfbe-4ea5-40be-a017-4a682eb51202-catalog-content\") pod \"redhat-marketplace-fx8c6\" (UID: \"7b76cfbe-4ea5-40be-a017-4a682eb51202\") " pod="openshift-marketplace/redhat-marketplace-fx8c6" Dec 13 04:47:14 crc kubenswrapper[5070]: I1213 04:47:14.269814 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7b76cfbe-4ea5-40be-a017-4a682eb51202-utilities\") pod \"redhat-marketplace-fx8c6\" (UID: \"7b76cfbe-4ea5-40be-a017-4a682eb51202\") " pod="openshift-marketplace/redhat-marketplace-fx8c6" Dec 13 04:47:14 crc kubenswrapper[5070]: I1213 04:47:14.270055 5070 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7b76cfbe-4ea5-40be-a017-4a682eb51202-catalog-content\") pod \"redhat-marketplace-fx8c6\" (UID: \"7b76cfbe-4ea5-40be-a017-4a682eb51202\") " pod="openshift-marketplace/redhat-marketplace-fx8c6" Dec 13 04:47:14 crc kubenswrapper[5070]: I1213 04:47:14.299892 5070 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-np28z\" (UniqueName: \"kubernetes.io/projected/7b76cfbe-4ea5-40be-a017-4a682eb51202-kube-api-access-np28z\") pod \"redhat-marketplace-fx8c6\" (UID: \"7b76cfbe-4ea5-40be-a017-4a682eb51202\") " pod="openshift-marketplace/redhat-marketplace-fx8c6" Dec 13 04:47:14 crc kubenswrapper[5070]: I1213 04:47:14.465350 5070 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fx8c6" Dec 13 04:47:14 crc kubenswrapper[5070]: I1213 04:47:14.971169 5070 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-fx8c6"] Dec 13 04:47:15 crc kubenswrapper[5070]: I1213 04:47:15.183809 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fx8c6" event={"ID":"7b76cfbe-4ea5-40be-a017-4a682eb51202","Type":"ContainerStarted","Data":"c7959417a0735c5f3f52e987d74ede3233f775478b81228290344b3a0e6cf48c"} Dec 13 04:47:16 crc kubenswrapper[5070]: I1213 04:47:16.195343 5070 generic.go:334] "Generic (PLEG): container finished" podID="7b76cfbe-4ea5-40be-a017-4a682eb51202" containerID="8dec613124d227ff5ccb9eaf0e198b5630243fffb6d85b5f6a054ce0c89ecd3c" exitCode=0 Dec 13 04:47:16 crc kubenswrapper[5070]: I1213 04:47:16.195677 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fx8c6" event={"ID":"7b76cfbe-4ea5-40be-a017-4a682eb51202","Type":"ContainerDied","Data":"8dec613124d227ff5ccb9eaf0e198b5630243fffb6d85b5f6a054ce0c89ecd3c"} Dec 13 04:47:16 crc kubenswrapper[5070]: I1213 04:47:16.198320 5070 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 13 04:47:19 crc kubenswrapper[5070]: I1213 04:47:19.238991 5070 generic.go:334] "Generic (PLEG): container finished" podID="7b76cfbe-4ea5-40be-a017-4a682eb51202" containerID="de74e3871d2dfb8a4b382c63a656982b4ecbec793bc3a52b6b82b423f4d78d3c" exitCode=0 Dec 13 04:47:19 crc kubenswrapper[5070]: I1213 04:47:19.239159 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fx8c6" event={"ID":"7b76cfbe-4ea5-40be-a017-4a682eb51202","Type":"ContainerDied","Data":"de74e3871d2dfb8a4b382c63a656982b4ecbec793bc3a52b6b82b423f4d78d3c"} Dec 13 04:47:27 crc kubenswrapper[5070]: I1213 04:47:27.232926 5070 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-operators/neutron-operator-controller-manager-58879495c-mfwwx" podUID="cfd2f621-2f11-4b93-8b02-1ed72c06bb11" containerName="manager" probeResult="failure" output="Get \"http://10.217.0.80:8081/readyz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 13 04:47:30 crc kubenswrapper[5070]: I1213 04:47:30.351799 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fx8c6" event={"ID":"7b76cfbe-4ea5-40be-a017-4a682eb51202","Type":"ContainerStarted","Data":"f50ff852c02daaf9f2d6b7e76ea305f20c18d7147dc0ec05e7728f256b5714c8"} Dec 13 04:47:30 crc kubenswrapper[5070]: I1213 04:47:30.383420 5070 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-fx8c6" podStartSLOduration=3.243455123 podStartE2EDuration="16.383396547s" podCreationTimestamp="2025-12-13 04:47:14 +0000 UTC" firstStartedPulling="2025-12-13 04:47:16.197929944 +0000 UTC m=+5728.433773530" lastFinishedPulling="2025-12-13 04:47:29.337871388 +0000 UTC m=+5741.573714954" observedRunningTime="2025-12-13 04:47:30.374088024 +0000 UTC 
m=+5742.609931580" watchObservedRunningTime="2025-12-13 04:47:30.383396547 +0000 UTC m=+5742.619240093" Dec 13 04:47:34 crc kubenswrapper[5070]: I1213 04:47:34.466939 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-fx8c6" Dec 13 04:47:34 crc kubenswrapper[5070]: I1213 04:47:34.467599 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-fx8c6" Dec 13 04:47:34 crc kubenswrapper[5070]: I1213 04:47:34.526925 5070 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-fx8c6" Dec 13 04:47:35 crc kubenswrapper[5070]: I1213 04:47:35.441677 5070 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-fx8c6" Dec 13 04:47:35 crc kubenswrapper[5070]: I1213 04:47:35.489705 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-fx8c6"] Dec 13 04:47:37 crc kubenswrapper[5070]: I1213 04:47:37.416636 5070 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-fx8c6" podUID="7b76cfbe-4ea5-40be-a017-4a682eb51202" containerName="registry-server" containerID="cri-o://f50ff852c02daaf9f2d6b7e76ea305f20c18d7147dc0ec05e7728f256b5714c8" gracePeriod=2 Dec 13 04:47:38 crc kubenswrapper[5070]: I1213 04:47:38.443401 5070 generic.go:334] "Generic (PLEG): container finished" podID="7b76cfbe-4ea5-40be-a017-4a682eb51202" containerID="f50ff852c02daaf9f2d6b7e76ea305f20c18d7147dc0ec05e7728f256b5714c8" exitCode=0 Dec 13 04:47:38 crc kubenswrapper[5070]: I1213 04:47:38.443476 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fx8c6" event={"ID":"7b76cfbe-4ea5-40be-a017-4a682eb51202","Type":"ContainerDied","Data":"f50ff852c02daaf9f2d6b7e76ea305f20c18d7147dc0ec05e7728f256b5714c8"} Dec 13 04:47:39 crc kubenswrapper[5070]: I1213 04:47:39.156567 5070 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fx8c6" Dec 13 04:47:39 crc kubenswrapper[5070]: I1213 04:47:39.231480 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7b76cfbe-4ea5-40be-a017-4a682eb51202-catalog-content\") pod \"7b76cfbe-4ea5-40be-a017-4a682eb51202\" (UID: \"7b76cfbe-4ea5-40be-a017-4a682eb51202\") " Dec 13 04:47:39 crc kubenswrapper[5070]: I1213 04:47:39.231538 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-np28z\" (UniqueName: \"kubernetes.io/projected/7b76cfbe-4ea5-40be-a017-4a682eb51202-kube-api-access-np28z\") pod \"7b76cfbe-4ea5-40be-a017-4a682eb51202\" (UID: \"7b76cfbe-4ea5-40be-a017-4a682eb51202\") " Dec 13 04:47:39 crc kubenswrapper[5070]: I1213 04:47:39.231681 5070 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7b76cfbe-4ea5-40be-a017-4a682eb51202-utilities\") pod \"7b76cfbe-4ea5-40be-a017-4a682eb51202\" (UID: \"7b76cfbe-4ea5-40be-a017-4a682eb51202\") " Dec 13 04:47:39 crc kubenswrapper[5070]: I1213 04:47:39.233526 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7b76cfbe-4ea5-40be-a017-4a682eb51202-utilities" (OuterVolumeSpecName: "utilities") pod "7b76cfbe-4ea5-40be-a017-4a682eb51202" (UID: "7b76cfbe-4ea5-40be-a017-4a682eb51202"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 04:47:39 crc kubenswrapper[5070]: I1213 04:47:39.243667 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7b76cfbe-4ea5-40be-a017-4a682eb51202-kube-api-access-np28z" (OuterVolumeSpecName: "kube-api-access-np28z") pod "7b76cfbe-4ea5-40be-a017-4a682eb51202" (UID: "7b76cfbe-4ea5-40be-a017-4a682eb51202"). InnerVolumeSpecName "kube-api-access-np28z". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 04:47:39 crc kubenswrapper[5070]: I1213 04:47:39.255780 5070 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7b76cfbe-4ea5-40be-a017-4a682eb51202-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7b76cfbe-4ea5-40be-a017-4a682eb51202" (UID: "7b76cfbe-4ea5-40be-a017-4a682eb51202"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 04:47:39 crc kubenswrapper[5070]: I1213 04:47:39.334013 5070 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7b76cfbe-4ea5-40be-a017-4a682eb51202-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 13 04:47:39 crc kubenswrapper[5070]: I1213 04:47:39.334318 5070 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-np28z\" (UniqueName: \"kubernetes.io/projected/7b76cfbe-4ea5-40be-a017-4a682eb51202-kube-api-access-np28z\") on node \"crc\" DevicePath \"\"" Dec 13 04:47:39 crc kubenswrapper[5070]: I1213 04:47:39.334331 5070 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7b76cfbe-4ea5-40be-a017-4a682eb51202-utilities\") on node \"crc\" DevicePath \"\"" Dec 13 04:47:39 crc kubenswrapper[5070]: I1213 04:47:39.453361 5070 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fx8c6" event={"ID":"7b76cfbe-4ea5-40be-a017-4a682eb51202","Type":"ContainerDied","Data":"c7959417a0735c5f3f52e987d74ede3233f775478b81228290344b3a0e6cf48c"} Dec 13 04:47:39 crc kubenswrapper[5070]: I1213 04:47:39.453420 5070 scope.go:117] "RemoveContainer" containerID="f50ff852c02daaf9f2d6b7e76ea305f20c18d7147dc0ec05e7728f256b5714c8" Dec 13 04:47:39 crc kubenswrapper[5070]: I1213 04:47:39.453481 5070 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fx8c6" Dec 13 04:47:39 crc kubenswrapper[5070]: I1213 04:47:39.472853 5070 scope.go:117] "RemoveContainer" containerID="de74e3871d2dfb8a4b382c63a656982b4ecbec793bc3a52b6b82b423f4d78d3c" Dec 13 04:47:39 crc kubenswrapper[5070]: I1213 04:47:39.509713 5070 scope.go:117] "RemoveContainer" containerID="8dec613124d227ff5ccb9eaf0e198b5630243fffb6d85b5f6a054ce0c89ecd3c" Dec 13 04:47:39 crc kubenswrapper[5070]: I1213 04:47:39.519888 5070 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-fx8c6"] Dec 13 04:47:39 crc kubenswrapper[5070]: I1213 04:47:39.528423 5070 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-fx8c6"] Dec 13 04:47:40 crc kubenswrapper[5070]: I1213 04:47:40.185072 5070 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7b76cfbe-4ea5-40be-a017-4a682eb51202" path="/var/lib/kubelet/pods/7b76cfbe-4ea5-40be-a017-4a682eb51202/volumes" var/home/core/zuul-output/logs/crc-cloud-workdir-crc-all-logs.tar.gz0000644000175000000000000000005515117167767024467 0ustar coreroot‹íÁ  ÷Om7 €7šÞ'(var/home/core/zuul-output/logs/crc-cloud/0000755000175000000000000000000015117167767017404 5ustar corerootvar/home/core/zuul-output/artifacts/0000755000175000017500000000000015117154256016514 5ustar corecorevar/home/core/zuul-output/docs/0000755000175000017500000000000015117154256015464 5ustar corecore